# Sync the dev environment: install Python deps via Poetry, then make sure
# the vendored git submodules (vendor/llama.cpp) are checked out and current.
update:
	poetry install
	git submodule update --init --recursive
# Pull the latest upstream llama.cpp into the vendored checkout.
# NOTE(review): pulls `master` directly — assumes the submodule tracks master.
update.vendor:
	cd vendor/llama.cpp && git pull origin master
# Install this package's dependencies: upgrade pip, then do an editable
# install with every optional extra enabled (".[all]").
deps:
	python3 -m pip install --upgrade pip
	python3 -m pip install -e ".[all]"
# Default build: verbose editable pip install (triggers the native compile
# through the project's build backend).
build:
	python3 -m pip install --verbose -e .
# Debug build for native-code troubleshooting:
#   - cmake.verbose/logging.level  -> full build logs
#   - install.strip=false          -> keep symbols in installed binaries
#   - CMAKE_BUILD_TYPE=Debug, -ggdb -O0 for C and C++ -> debuggable, unoptimised
build.debug:
	python3 -m pip install \
		--verbose \
		--config-settings=cmake.verbose=true \
		--config-settings=logging.level=INFO \
		--config-settings=install.strip=false \
		--config-settings=cmake.args="-DCMAKE_BUILD_TYPE=Debug;-DCMAKE_C_FLAGS='-ggdb -O0';-DCMAKE_CXX_FLAGS='-ggdb -O0'" \
		--editable .
# Editable build with the NVIDIA CUDA backend enabled (LLAMA_CUDA).
build.cuda:
	CMAKE_ARGS="-DLLAMA_CUDA=on" python3 -m pip install --verbose -e .
# Editable build with the CLBlast/OpenCL backend enabled (LLAMA_CLBLAST).
build.opencl:
	CMAKE_ARGS="-DLLAMA_CLBLAST=on" python3 -m pip install --verbose -e .
# Editable build with BLAS enabled, using OpenBLAS as the BLAS vendor.
build.openblas:
	CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS" python3 -m pip install --verbose -e .
# Editable build with BLAS enabled, using FLAME (BLIS) as the BLAS vendor.
build.blis:
	CMAKE_ARGS="-DLLAMA_BLAS=on -DLLAMA_BLAS_VENDOR=FLAME" python3 -m pip install --verbose -e .
# Editable build with the Metal backend enabled (LLAMA_METAL).
build.metal:
	CMAKE_ARGS="-DLLAMA_METAL=on" python3 -m pip install --verbose -e .
# Editable build with the Vulkan backend enabled (LLAMA_VULKAN).
build.vulkan:
	CMAKE_ARGS="-DLLAMA_VULKAN=on" python3 -m pip install --verbose -e .
# Editable build with the Kompute backend enabled (LLAMA_KOMPUTE).
build.kompute:
	CMAKE_ARGS="-DLLAMA_KOMPUTE=on" python3 -m pip install --verbose -e .
# Editable build with the SYCL backend enabled (LLAMA_SYCL).
build.sycl:
	CMAKE_ARGS="-DLLAMA_SYCL=on" python3 -m pip install --verbose -e .
# Editable build with the RPC backend enabled (LLAMA_RPC).
build.rpc:
	CMAKE_ARGS="-DLLAMA_RPC=on" python3 -m pip install --verbose -e .
# Build a source distribution into dist/ using the `build` frontend.
build.sdist:
	python3 -m build --sdist
# Upload everything under dist/ to PyPI with twine.
# NOTE(review): does not depend on build.sdist — dist/ must already exist.
deploy.pypi:
	python3 -m twine upload dist/*
# Build the mkdocs site, then publish it to GitHub Pages.
deploy.gh-docs:
	mkdocs build
	mkdocs gh-deploy
# Run the test suite with pytest (non-zero exit on failure fails the target).
test:
	python3 -m pytest
# Build the simple Docker image, tagged llama-cpp-python:latest.
docker:
	docker build -t llama-cpp-python:latest -f docker/simple/Dockerfile .
# Run the dev server via uvicorn (--factory: llama.server:app is an app
# factory, not an app instance).
#
# Fix: ${HOST}/${PORT} were undefined Make variables; when absent from the
# environment they expanded to nothing, yielding a malformed uvicorn command
# line (`--host --port`). `?=` supplies defaults while still letting the
# environment or the command line override them (make run-server PORT=8080).
HOST ?= localhost
PORT ?= 8000

run-server:
	uvicorn --factory llama.server:app --host ${HOST} --port ${PORT}
# Remove build artifacts from the vendored llama.cpp tree and this package.
# Fixes:
#   - recursive invocation now uses $(MAKE), not bare `make`, so -j/-n and
#     the jobserver propagate correctly;
#   - `-`-prefixed `rm` lines replaced with `rm -f`, which is silent for
#     missing files instead of printing "Error (ignored)" noise.
# The `-` is kept only on the vendor `clean`, which may legitimately fail
# when the vendor tree has never been configured.
clean:
	- cd vendor/llama.cpp && $(MAKE) clean
	cd vendor/llama.cpp && rm -f libllama.so
	rm -rf _skbuild
	rm -f llama_cpp/*.so
	rm -f llama_cpp/*.dylib
	rm -f llama_cpp/*.metal
	rm -f llama_cpp/*.dll
	rm -f llama_cpp/*.lib
# Declare every command-style target phony so a stray file with the same
# name (e.g. `test`, `build`) can never shadow it.
# Fix: the list was missing many targets defined in this file (deps,
# build.debug, build.blis, build.metal, build.vulkan, build.kompute,
# build.sycl, build.rpc, test, run-server); they are now all listed.
.PHONY: \
	update \
	update.vendor \
	deps \
	build \
	build.debug \
	build.cuda \
	build.opencl \
	build.openblas \
	build.blis \
	build.metal \
	build.vulkan \
	build.kompute \
	build.sycl \
	build.rpc \
	build.sdist \
	deploy.pypi \
	deploy.gh-docs \
	test \
	docker \
	run-server \
	clean