# llama.cpp/Dockerfile.cuda
# Retrieved 2023-05-03 10:29:05 -04:00 (originally 15 lines, 409 B, no trailing EOL)
FROM nvidia/cuda:12.1.1-devel-ubuntu20.04

# Bind the server to all interfaces so it is reachable from outside the container
ENV HOST=0.0.0.0

WORKDIR /app

# OS-level build dependencies: combine update+install in one layer (avoids a
# stale apt cache) and drop the package lists in the same layer to keep the
# image small. `apt-get` (not `apt`) is the stable CLI for scripts.
RUN apt-get update && apt-get install -y --no-install-recommends \
      python3 \
      python3-pip \
    && rm -rf /var/lib/apt/lists/*

# Python build and runtime dependencies. Installed before COPY so editing the
# source does not invalidate this layer; --no-cache-dir keeps pip's download
# cache out of the image.
RUN python3 -m pip install --no-cache-dir --upgrade \
      pip \
      pytest \
      cmake \
      scikit-build \
      setuptools \
      fastapi \
      uvicorn \
      sse-starlette

# Copy the application source last — most frequently changing layer.
COPY . .

# Build the llama.cpp extension with cuBLAS GPU acceleration enabled.
RUN LLAMA_CUBLAS=1 python3 setup.py develop

# Documentation only: default port used by llama_cpp.server.
EXPOSE 8000

# Exec (JSON-array) form so the server runs as PID 1 and receives SIGTERM
# from `docker stop` directly instead of being wrapped in `/bin/sh -c`.
CMD ["python3", "-m", "llama_cpp.server"]