0cc8d8282a
Bumps [uvicorn](https://github.com/encode/uvicorn) from 0.23.1 to 0.23.2.
- [Release notes](https://github.com/encode/uvicorn/releases)
- [Changelog](https://github.com/encode/uvicorn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/encode/uvicorn/compare/0.23.1...0.23.2)

---
updated-dependencies:
- dependency-name: uvicorn
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
[tool.poetry]
name = "llama_cpp_python"
version = "0.1.77"
description = "Python bindings for the llama.cpp library"
authors = ["Andrei Betlen <abetlen@gmail.com>"]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/abetlen/llama-cpp-python"
repository = "https://github.com/abetlen/llama-cpp-python"
packages = [{include = "llama_cpp"}]
include = [
    "LICENSE.md",
]

[tool.poetry.dependencies]
python = "^3.8.1"
typing-extensions = "^4.7.1"
numpy = "^1.24.4"
diskcache = "^5.6.1"
uvicorn = { version = "^0.23.2", optional = true }
fastapi = { version = ">=0.100.0", optional = true }
sse-starlette = { version = ">=1.6.1", optional = true }
pydantic-settings = { version = ">=2.0.1", optional = true }

[tool.poetry.group.dev.dependencies]
black = "^23.7.0"
twine = "^4.0.2"
mkdocs = "^1.4.3"
mkdocstrings = {extras = ["python"], version = "^0.22.0"}
mkdocs-material = "^9.1.21"
pytest = "^7.4.0"
httpx = "^0.24.1"
scikit-build = "0.17.6"

[tool.poetry.extras]
server = ["uvicorn", "fastapi", "pydantic-settings", "sse-starlette"]

[build-system]
requires = [
    "setuptools>=42",
    "scikit-build>=0.13",
    "cmake>=3.18",
    "ninja",
]
build-backend = "setuptools.build_meta"
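
The commit message labels this a semver-patch update, and the Poetry caret constraint "^0.23.2" pins uvicorn to ">=0.23.2,<0.24.0". A minimal sketch for checking that an installed uvicorn satisfies that range; it assumes the third-party "packaging" library is available and is not part of this repository:

# Sketch: verify the installed uvicorn satisfies Poetry's "^0.23.2" constraint.
# Assumes the "packaging" library is installed; not part of this repository.
from importlib.metadata import version

from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Poetry's caret constraint "^0.23.2" expands to ">=0.23.2,<0.24.0".
constraint = SpecifierSet(">=0.23.2,<0.24.0")
installed = Version(version("uvicorn"))

print(f"uvicorn {installed} satisfies ^0.23.2: {installed in constraint}")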