# llama.cpp/pyproject.toml
# (source-viewer metadata, commented out so the file is valid TOML:
#  captured 2023-07-18 19:30:06 -04:00, 53 lines, 1.1 KiB, TOML)
# Native extension build driven by scikit-build-core (CMake backend;
# see [tool.scikit-build] below for its configuration).
[build-system]
requires = ["scikit-build-core>=0.4.5"]
build-backend = "scikit_build_core.build"

[project]
name = "llama_cpp_python"
version = "0.1.73"
description = "Python bindings for the llama.cpp library"
readme = "README.md"
license = { text = "MIT" }
authors = [
    { name = "Andrei Betlen", email = "abetlen@gmail.com" },
]
requires-python = ">=3.7"
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
# Extras live under [project.optional-dependencies].
dependencies = [
    "diskcache>=5.6.1",
    "numpy>=1.20.0",
    "typing-extensions>=4.5.0",
]

# Optional extras (pip install "llama_cpp_python[<extra>]").
# Each array is sorted alphabetically.
[project.optional-dependencies]
# fastapi/uvicorn stack for the bundled server component.
server = [
    "fastapi>=0.100.0",
    "pydantic-settings>=2.0.1",
    "sse-starlette>=1.6.1",
    "uvicorn>=0.22.0",
]
test = [
    "httpx>=0.24.1",
    "pytest>=7.4.0",
]
# Developer tooling: formatting, packaging/release, docs, plus the
# test dependencies duplicated so `dev` is self-sufficient.
dev = [
    "black>=23.3.0",
    "httpx>=0.24.1",
    "mkdocs>=1.4.3",
    "mkdocs-material>=9.1.18",
    "mkdocstrings[python]>=0.22.0",
    "pytest>=7.4.0",
    "twine>=4.0.2",
]
# Convenience extra that pulls in every other extra via a self-reference.
all = [
    "llama_cpp_python[server,test,dev]",
]

[project.urls]
Homepage = "https://github.com/abetlen/llama-cpp-python"
Issues = "https://github.com/abetlen/llama-cpp-python/issues"

# scikit-build-core configuration (the [build-system] backend above).
[tool.scikit-build]
# Ship the pure-Python package alongside the compiled library.
wheel.packages = ["llama_cpp"]
# Expand macOS universal2 wheels into per-arch tags.
wheel.expand-macos-universal-tags = true
# Verbose CMake output to aid debugging of the native build.
cmake.verbose = true