Migrate to scikit-build-core. Closes #489
parent c9985abc03
commit 6cb77a20c6
13 changed files with 98 additions and 1765 deletions
.github/workflows/build-and-release.yaml (8 changes)

@@ -26,7 +26,8 @@ jobs:
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools
+          python -m pip install --upgrade pip
+          python -m pip install -e .[all]

       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
@@ -46,10 +47,11 @@ jobs:
       - uses: actions/setup-python@v3
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools
+          python -m pip install --upgrade pip build
+          python -m pip install -e .[all]
       - name: Build source distribution
         run: |
-          python setup.py sdist
+          python -m build --sdist
       - uses: actions/upload-artifact@v3
         with:
           path: ./dist/*.tar.gz
.github/workflows/publish-to-test.yaml (5 changes)

@@ -19,10 +19,11 @@ jobs:
           python-version: "3.8"
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools
+          python3 -m pip install --upgrade pip build
+          python3 -m pip install -e .[all]
       - name: Build source distribution
         run: |
-          python setup.py sdist
+          python3 -m build --sdist
       - name: Publish to Test PyPI
         uses: pypa/gh-action-pypi-publish@release/v1
         with:
.github/workflows/publish.yaml (5 changes)

@@ -19,10 +19,11 @@ jobs:
           python-version: "3.8"
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools
+          python3 -m pip install --upgrade pip build
+          python3 -m pip install -e .[all]
       - name: Build source distribution
         run: |
-          python setup.py sdist
+          python3 -m build --sdist
       - name: Publish distribution to PyPI
         # TODO: move to tag based releases
         # if: startsWith(github.ref, 'refs/tags')
.github/workflows/test-pypi.yaml (6 changes)

@@ -18,7 +18,7 @@ jobs:
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip
-          python3 -m pip install --verbose llama-cpp-python[server,test]
+          python3 -m pip install --verbose llama-cpp-python[all]
       - name: Test with pytest
         run: |
           python3 -c "import llama_cpp"
@@ -38,7 +38,7 @@ jobs:
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip
-          python3 -m pip install --verbose llama-cpp-python[server,test]
+          python3 -m pip install --verbose llama-cpp-python[all]
       - name: Test with pytest
         run: |
           python3 -c "import llama_cpp"
@@ -58,7 +58,7 @@ jobs:
       - name: Install dependencies
         run: |
           python3 -m pip install --upgrade pip
-          python3 -m pip install --verbose llama-cpp-python[server,test]
+          python3 -m pip install --verbose llama-cpp-python[all]
       - name: Test with pytest
         run: |
           python3 -c "import llama_cpp"
.github/workflows/test.yaml (18 changes)

@@ -26,11 +26,11 @@ jobs:
           python-version: ${{ matrix.python-version }}
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools fastapi sse-starlette httpx uvicorn pydantic-settings
-          pip install . -v
+          python3 -m pip install --upgrade pip
+          python3 -m pip install .[all] -v
       - name: Test with pytest
         run: |
-          pytest
+          python3 -m pytest

   build-windows:

@@ -49,11 +49,11 @@ jobs:
           python-version: ${{ matrix.python-version }}
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools fastapi sse-starlette httpx uvicorn pydantic-settings
-          pip install . -v
+          python3 -m pip install --upgrade pip
+          python3 -m pip install .[all] -v
       - name: Test with pytest
         run: |
-          pytest
+          python3 -m pytest

   build-macos:

@@ -72,8 +72,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip pytest cmake scikit-build setuptools fastapi sse-starlette httpx uvicorn pydantic-settings
-          pip install . -v
+          python3 -m pip install --upgrade pip
+          python3 -m pip install .[all] -v
       - name: Test with pytest
         run: |
-          pytest
+          python3 -m pytest
CMakeLists.txt

@@ -2,33 +2,27 @@ cmake_minimum_required(VERSION 3.4...3.22)

 project(llama_cpp)

-option(FORCE_CMAKE "Force CMake build of Python bindings" OFF)
+option(BUILD_LLAMA_CPP "Build llama.cpp shared library and install alongside python package" ON)

-set(FORCE_CMAKE $ENV{FORCE_CMAKE})
-
-if (UNIX AND NOT FORCE_CMAKE)
-    add_custom_command(
-        OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/vendor/llama.cpp/libllama.so
-        COMMAND make libllama.so
-        WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/vendor/llama.cpp
-    )
-    add_custom_target(
-        run ALL
-        DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/vendor/llama.cpp/libllama.so
-    )
-    install(
-        FILES ${CMAKE_CURRENT_SOURCE_DIR}/vendor/llama.cpp/libllama.so
-        DESTINATION llama_cpp
-    )
-else()
+if (BUILD_LLAMA_CPP)
     set(BUILD_SHARED_LIBS "On")
     add_subdirectory(vendor/llama.cpp)
     install(
         TARGETS llama
-        LIBRARY DESTINATION llama_cpp
-        RUNTIME DESTINATION llama_cpp
-        ARCHIVE DESTINATION llama_cpp
-        FRAMEWORK DESTINATION llama_cpp
-        RESOURCE DESTINATION llama_cpp
+        LIBRARY DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
+        RUNTIME DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
+        ARCHIVE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
+        FRAMEWORK DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
+        RESOURCE DESTINATION ${SKBUILD_PLATLIB_DIR}/llama_cpp
+    )
+    # Temporary fix for https://github.com/scikit-build/scikit-build-core/issues/374
+    install(
+        TARGETS llama
+        LIBRARY DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
+        RUNTIME DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
+        ARCHIVE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
+        FRAMEWORK DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
+        RESOURCE DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}/llama_cpp
     )
 endif()
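With scikit-build-core the llama shared library is installed into the llama_cpp package directory via ${SKBUILD_PLATLIB_DIR}, and, per the temporary workaround for scikit-build-core issue #374, also copied into the source tree's llama_cpp/ directory. A quick sanity check after an install, mirroring the import smoke test the workflows above run (the second command is illustrative only, not part of this commit):

    # confirm the package and its bundled shared library import (same check as test-pypi.yaml)
    python3 -c "import llama_cpp"
    # print the installed package directory; the shared library is placed alongside it (illustrative)
    python3 -c "import llama_cpp, os; print(os.path.dirname(llama_cpp.__file__))"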
Makefile (18 changes)

@@ -5,26 +5,30 @@ update:
 update.vendor:
 	cd vendor/llama.cpp && git pull origin master

+deps:
+	python3 -m pip install pip
+	python3 -m pip install -e ".[all]"
+
 build:
-	python3 setup.py develop
+	python3 -m pip install -e .

 build.cuda:
-	CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 python3 setup.py develop
+	CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 python3 -m pip install -e .

 build.opencl:
-	CMAKE_ARGS="-DLLAMA_CLBLAST=on" FORCE_CMAKE=1 python3 setup.py develop
+	CMAKE_ARGS="-DLLAMA_CLBLAST=on" FORCE_CMAKE=1 python3 -m pip install -e .

 build.openblas:
-	CMAKE_ARGS="-DLLAMA_OPENBLAS=on" FORCE_CMAKE=1 python3 setup.py develop
+	CMAKE_ARGS="-DLLAMA_CLBLAST=on" FORCE_CMAKE=1 python3 -m pip install -e .

 build.blis:
-	CMAKE_ARGS="-DLLAMA_OPENBLAS=on -DLLAMA_OPENBLAS_VENDOR=blis" FORCE_CMAKE=1 python3 setup.py develop
+	CMAKE_ARGS="-DLLAMA_OPENBLAS=on -DLLAMA_OPENBLAS_VENDOR=blis" FORCE_CMAKE=1 python3 -m pip install -e .

 build.metal:
-	CMAKE_ARGS="-DLLAMA_METAL=on" FORCE_CMAKE=1 python3 setup.py develop
+	CMAKE_ARGS="-DLLAMA_METAL=on" FORCE_CMAKE=1 python3 -m pip install -e .

 build.sdist:
-	python3 setup.py sdist
+	python3 -m build --sdist

 deploy.pypi:
 	python3 -m twine upload dist/*
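With setup.py gone, the Makefile targets above are thin wrappers over pip, so the same builds can be run by hand. A minimal sketch using commands taken directly from the targets above (swap CMAKE_ARGS for whichever backend you need):

    # plain editable install (replaces `python3 setup.py develop`)
    python3 -m pip install -e .
    # hardware-accelerated variant, e.g. CUDA as in the build.cuda target
    CMAKE_ARGS="-DLLAMA_CUBLAS=on" FORCE_CMAKE=1 python3 -m pip install -e .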
README.md

@@ -180,13 +180,6 @@ pip install -e .

 # if you want to use the fastapi / openapi server
 pip install -e .[server]
-
-# If you're a poetry user, installing will also include a virtual environment
-poetry install --all-extras
-. .venv/bin/activate
-
-# Will need to be re-run any time vendor/llama.cpp is updated
-python3 setup.py develop
 ```

 # How does this compare to other Python bindings of `llama.cpp`?
Dockerfile

@@ -19,9 +19,9 @@ RUN mkdir /app
 WORKDIR /app
 COPY . /app

-RUN python3 -m pip install --upgrade pip pytest cmake scikit-build setuptools fastapi uvicorn sse-starlette pydantic-settings
+RUN python3 -m pip install --upgrade pip

-RUN make build && make clean
+RUN make deps && make build && make clean

 # Set environment variable for the host
 ENV HOST=0.0.0.0
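The image now installs Python dependencies through the new `make deps` target instead of a hand-maintained pip line. A minimal local build of the image, assuming this Dockerfile sits at the build context root (the tag name is illustrative, not defined in this commit):

    docker build -t llama-cpp-python .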
poetry.lock (generated, 1636 changes): file diff suppressed because it is too large.
poetry.toml (3 deletions)

@@ -1,3 +0,0 @@
-[virtualenvs]
-in-project = true
-prefer-active-python = true
pyproject.toml

@@ -1,44 +1,53 @@
-[tool.poetry]
+[build-system]
+requires = ["scikit-build-core>=0.4.5"]
+build-backend = "scikit_build_core.build"
+
+[project]
 name = "llama_cpp_python"
 version = "0.1.73"
 description = "Python bindings for the llama.cpp library"
-authors = ["Andrei Betlen <abetlen@gmail.com>"]
-license = "MIT"
 readme = "README.md"
-homepage = "https://github.com/abetlen/llama-cpp-python"
-repository = "https://github.com/abetlen/llama-cpp-python"
-packages = [{include = "llama_cpp"}]
-include = [
-    "LICENSE.md",
+license = { text = "MIT" }
+authors = [
+    { name = "Andrei Betlen", email = "abetlen@gmail.com" },
 ]
+dependencies = [
+    "typing-extensions>=4.7.1",
+    "numpy>=1.24.4",
+    "diskcache>=5.6.1",
+]
+requires-python = ">=3.7"

+[project.optional-dependencies]
+server = [
+    "uvicorn>=0.22.0",
+    "fastapi>=0.100.0",
+    "pydantic-settings>=2.0.1",
+    "sse-starlette>=1.6.1",
+]
+test = [
+    "pytest>=7.4.0",
+    "httpx>=0.24.1",
+]
+dev = [
+    "black>=23.3.0",
+    "twine>=4.0.2",
+    "mkdocs>=1.4.3",
+    "mkdocstrings[python]>=0.22.0",
+    "mkdocs-material>=9.1.18",
+    "pytest>=7.4.0",
+    "httpx>=0.24.1",
+]
+all = [
+    "llama_cpp_python[server,test,dev]",
+]

-[tool.poetry.dependencies]
-python = "^3.8.1"
-typing-extensions = "^4.7.1"
-numpy = "^1.24.4"
-diskcache = "^5.6.1"
-uvicorn = { version = "^0.22.0", optional = true }
-fastapi = { version = "^0.99.1", optional = true }
-sse-starlette = { version = "^1.6.1", optional = true }
+[tool.scikit-build]
+wheel.packages = ["llama_cpp"]
+wheel.expand-macos-universal-tags = true
+cmake.verbose = true

-[tool.poetry.group.dev.dependencies]
-black = "^23.3.0"
-twine = "^4.0.2"
-mkdocs = "^1.4.3"
-mkdocstrings = {extras = ["python"], version = "^0.22.0"}
-mkdocs-material = "^9.1.18"
-pytest = "^7.4.0"
-httpx = "^0.24.1"
-scikit-build = "0.17.6"
-
-[tool.poetry.extras]
-server = ["uvicorn>=0.22.0", "fastapi>=0.100.0", "pydantic-settings>=2.0.1", "sse-starlette>=1.6.1"]
-
-[build-system]
-requires = [
-    "setuptools>=42",
-    "scikit-build>=0.13",
-    "cmake>=3.18",
-    "ninja",
-]
-build-backend = "setuptools.build_meta"
+[project.urls]
+Homepage = "https://github.com/abetlen/llama-cpp-python"
+Issues = "https://github.com/abetlen/llama-cpp-python/issues"
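With the backend switched to scikit_build_core.build, source distributions and editable installs both go through standard PEP 517 front ends, which is what the updated workflows and Makefile invoke. A minimal sketch of the resulting developer flow (extras names come from the optional-dependencies table above):

    python3 -m pip install --upgrade pip build
    # editable install with every optional extra (server, test, dev)
    python3 -m pip install -e ".[all]"
    # build a source distribution (replaces `python setup.py sdist`)
    python3 -m build --sdist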
setup.py (32 deletions)

@@ -1,32 +0,0 @@
-from skbuild import setup
-
-from pathlib import Path
-
-this_directory = Path(__file__).parent
-long_description = (this_directory / "README.md").read_text(encoding="utf-8")
-
-setup(
-    name="llama_cpp_python",
-    description="A Python wrapper for llama.cpp",
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    version="0.1.73",
-    author="Andrei Betlen",
-    author_email="abetlen@gmail.com",
-    license="MIT",
-    package_dir={"llama_cpp": "llama_cpp", "llama_cpp.server": "llama_cpp/server"},
-    packages=["llama_cpp", "llama_cpp.server"],
-    install_requires=["typing-extensions>=4.5.0", "numpy>=1.20.0", "diskcache>=5.6.1"],
-    extras_require={
-        "server": ["uvicorn>=0.22.0", "fastapi>=0.100.0", "pydantic-settings>=2.0.1", "sse-starlette>=1.6.1"],
-    },
-    python_requires=">=3.7",
-    classifiers=[
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-    ],
-)