From ca11673061ecd9198b4800f68073ae14d4440ecd Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Fri, 7 Jul 2023 03:38:51 -0400
Subject: [PATCH] Add universal docker image

---
 Makefile                 |  7 +++++++
 docker/simple/Dockerfile | 33 +++++++++++++++++++++++++++++++++
 docker/simple/run.sh     |  4 ++++
 3 files changed, 44 insertions(+)
 create mode 100644 docker/simple/Dockerfile
 create mode 100644 docker/simple/run.sh

diff --git a/Makefile b/Makefile
index 1be35cf..c359260 100644
--- a/Makefile
+++ b/Makefile
@@ -36,6 +36,12 @@ deploy.gh-docs:
 test:
 	python3 -m pytest
 
+docker:
+	docker build -t llama-cpp-python:latest -f docker/simple/Dockerfile .
+
+run-server:
+	uvicorn --factory llama_cpp.server.app:create_app --host ${HOST} --port ${PORT}
+
 clean:
 	- cd vendor/llama.cpp && make clean
 	- cd vendor/llama.cpp && rm libllama.so
@@ -56,4 +62,5 @@ clean:
 	build.sdist \
 	deploy.pypi \
 	deploy.gh-docs \
+	docker \
 	clean
\ No newline at end of file
diff --git a/docker/simple/Dockerfile b/docker/simple/Dockerfile
new file mode 100644
index 0000000..ad36b98
--- /dev/null
+++ b/docker/simple/Dockerfile
@@ -0,0 +1,33 @@
+# Define the image argument and provide a default value
+ARG IMAGE=python:3-slim-bullseye
+
+# Use the image as specified
+FROM ${IMAGE}
+
+# Re-declare the ARG after FROM so it is visible inside this build stage
+ARG IMAGE
+
+# Install build prerequisites; remove the apt lists in the same layer so
+# the package index is never baked into the image
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    ninja-build \
+    python3 \
+    python3-pip && rm -rf /var/lib/apt/lists/*
+
+# WORKDIR creates /app if it does not already exist
+WORKDIR /app
+COPY . /app
+
+RUN python3 -m pip install --no-cache-dir --upgrade pip pytest cmake scikit-build setuptools fastapi uvicorn sse-starlette
+
+RUN make build && make clean
+
+# Default bind address/port for the server (overridable at `docker run`)
+ENV HOST=0.0.0.0
+ENV PORT=8000
+
+# Expose a port for the server (documentation only; publish with -p)
+EXPOSE 8000
+
+# Run the server start script
+CMD ["/bin/sh", "/app/docker/simple/run.sh"]
diff --git a/docker/simple/run.sh b/docker/simple/run.sh
new file mode 100644
index 0000000..c85e73d
--- /dev/null
+++ b/docker/simple/run.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+make build
+exec uvicorn --factory llama_cpp.server.app:create_app --host "$HOST" --port "$PORT"