# pyproject.toml for llama-cpp-python — PEP 621 metadata built with scikit-build-core.
# Section order follows convention: [build-system], [project], [project.*], [tool.*].

[build-system]
requires = ["scikit-build-core>=0.4.5"]
build-backend = "scikit_build_core.build"

[project]
name = "llama_cpp_python"
version = "0.1.73"
description = "Python bindings for the llama.cpp library"
readme = "README.md"
license = { text = "MIT" }
authors = [
    { name = "Andrei Betlen", email = "abetlen@gmail.com" },
]
requires-python = ">=3.7"
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "diskcache>=5.6.1",
    "numpy>=1.24.4",
    "typing-extensions>=4.7.1",
]

[project.optional-dependencies]
# Extras for running the OpenAI-compatible server.
server = [
    "fastapi>=0.100.0",
    "pydantic-settings>=2.0.1",
    "sse-starlette>=1.6.1",
    "uvicorn>=0.22.0",
]
test = [
    "httpx>=0.24.1",
    "pytest>=7.4.0",
]
# Dev extras: formatting, packaging, docs, plus the test tools.
dev = [
    "black>=23.3.0",
    "httpx>=0.24.1",
    "mkdocs>=1.4.3",
    "mkdocs-material>=9.1.18",
    "mkdocstrings[python]>=0.22.0",
    "pytest>=7.4.0",
    "twine>=4.0.2",
]
# Self-referential extra that pulls in every other extra at once.
all = [
    "llama_cpp_python[server,test,dev]",
]

[project.urls]
Homepage = "https://github.com/abetlen/llama-cpp-python"
Issues = "https://github.com/abetlen/llama-cpp-python/issues"

[tool.scikit-build]
wheel.packages = ["llama_cpp"]
wheel.expand-macos-universal-tags = true
cmake.verbose = true