llama.cpp/Dockerfile.cuda

ARG CUDA_VERSION=12.1.1
FROM nvidia/cuda:${CUDA_VERSION}-devel-ubuntu22.04
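# The CUDA base version can be overridden at build time, e.g.:
#   docker build --build-arg CUDA_VERSION=11.8.0 -f Dockerfile.cuda .
# (assumes a matching nvidia/cuda:<version>-devel-ubuntu22.04 tag exists)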
# We need to set the host to 0.0.0.0 to allow outside access
ENV HOST=0.0.0.0
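# Copy the project source into the image (into the image root, since no WORKDIR is set)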
COPY . .
# Install Python plus the build and runtime dependencies for the server
RUN apt update && apt install -y python3 python3-pip
RUN python3 -m pip install --upgrade pip pytest cmake scikit-build setuptools fastapi uvicorn sse-starlette
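# Build and install the Python package with cuBLAS (CUDA) acceleration enabled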
RUN LLAMA_CUBLAS=1 python3 setup.py develop
# Run the server
CMD ["python3", "-m", "llama_cpp.server"]
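
# Example usage (a sketch; assumes the NVIDIA Container Toolkit is installed on the host
# and that the server reads the model path from the MODEL environment variable;
# the image tag and model path below are placeholders):
#   docker build -t llama-cpp-server-cuda -f Dockerfile.cuda .
#   docker run --gpus all -p 8000:8000 \
#       -v /path/to/models:/models -e MODEL=/models/model.bin \
#       llama-cpp-server-cuda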