llama.cpp/pyproject.toml
dependabot[bot] efcf380490
Bump fastapi from 0.96.0 to 0.97.0
Bumps [fastapi](https://github.com/tiangolo/fastapi) from 0.96.0 to 0.97.0.
- [Release notes](https://github.com/tiangolo/fastapi/releases)
- [Commits](https://github.com/tiangolo/fastapi/compare/0.96.0...0.97.0)

---
updated-dependencies:
- dependency-name: fastapi
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-06-12 21:03:40 +00:00

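# Project metadata consumed by Poetry.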
[tool.poetry]
name = "llama_cpp_python"
version = "0.1.62"
description = "Python bindings for the llama.cpp library"
authors = ["Andrei Betlen <abetlen@gmail.com>"]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/abetlen/llama-cpp-python"
repository = "https://github.com/abetlen/llama-cpp-python"
packages = [{include = "llama_cpp"}]
include = [
    "LICENSE.md",
]
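
# Runtime dependencies. Entries marked `optional = true` are installed
# only when the matching extra (see [tool.poetry.extras]) is requested.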
[tool.poetry.dependencies]
python = "^3.8.1"
typing-extensions = "^4.6.3"
numpy = "^1.20.0"
diskcache = "^5.6.1"
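# Optional dependencies for the HTTP server (FastAPI framework, uvicorn
# ASGI server, server-sent events via sse-starlette); pulled in by the
# "server" extra below.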
uvicorn = { version = "^0.22.0", optional = true }
fastapi = { version = "^0.97.0", optional = true }
sse-starlette = { version = "^1.6.1", optional = true }
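
# Development-only tooling: formatting (black), packaging (twine),
# docs (mkdocs), tests (pytest/httpx), and the scikit-build helper.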
[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
twine = "^4.0.2"
mkdocs = "^1.4.3"
mkdocstrings = {extras = ["python"], version = "^0.22.0"}
mkdocs-material = "^9.1.15"
pytest = "^7.3.1"
httpx = "^0.24.1"
scikit-build = "0.17.6"
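
# Extras group the optional dependencies above under a single name,
# e.g. `pip install llama-cpp-python[server]`.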
[tool.poetry.extras]
server = ["uvicorn", "fastapi", "sse-starlette"]
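
# PEP 517 build requirements: scikit-build bridges setuptools and the
# CMake/ninja build that compiles the bundled llama.cpp library.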
[build-system]
requires = [
    "setuptools>=42",
    "scikit-build>=0.13",
    "cmake>=3.18",
    "ninja",
]
build-backend = "setuptools.build_meta"