# pyproject.toml — packaging configuration for llama-cpp-python.
# PEP 517 build backend: scikit-build-core drives CMake to compile the
# vendored llama.cpp native library when the wheel is built.
[build-system]
requires = ["scikit-build-core[pyproject]>=0.5.1"]
build-backend = "scikit_build_core.build"
[project]
name = "llama_cpp_python"
# Version is resolved at build time from llama_cpp/__init__.py
# (see [tool.scikit-build.metadata.version]).
dynamic = ["version"]
description = "Python bindings for the llama.cpp library"
readme = "README.md"
license = { text = "MIT" }
authors = [
    { name = "Andrei Betlen", email = "abetlen@gmail.com" },
]
requires-python = ">=3.8"
dependencies = [
    "typing-extensions>=4.5.0",
    "numpy>=1.20.0",
    "diskcache>=5.6.1",
    # Lower bound chosen for compatibility with Hugging Face
    # transformers and mkdocs-material — TODO confirm still required.
    "jinja2>=2.11.3",
]
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
# Extras: install as llama_cpp_python[server], [test], [dev], or [all].
[project.optional-dependencies]
server = [
    "uvicorn>=0.22.0",
    "fastapi>=0.100.0",
    "pydantic-settings>=2.0.1",
    "sse-starlette>=1.6.1",
    "starlette-context>=0.3.6,<0.4",
    "PyYAML>=5.1",
]
test = [
    "pytest>=7.4.0",
    "httpx>=0.24.1",
    "scipy>=1.10",
]
dev = [
    "black>=23.3.0",
    "twine>=4.0.2",
    "mkdocs>=1.4.3",
    "mkdocstrings[python]>=0.22.0",
    "mkdocs-material>=9.1.18",
    "pytest>=7.4.0",
    "httpx>=0.24.1",
]
# Meta-extra pulling in every other extra via a self-reference.
all = [
    "llama_cpp_python[server,test,dev]",
]
[tool.scikit-build]
wheel.packages = ["llama_cpp"]
cmake.verbose = true
cmake.minimum-version = "3.21"
minimum-version = "0.5.1"
# .git directories are shipped in the sdist, presumably so the build can
# resolve the vendored llama.cpp submodule — verify against release flow.
sdist.include = [".git", "vendor/llama.cpp/.git"]

# Supplies the dynamic [project] version by regex-extracting it from the
# package's __init__.py.
[tool.scikit-build.metadata.version]
provider = "scikit_build_core.metadata.regex"
input = "llama_cpp/__init__.py"
[project.urls]
Homepage = "https://github.com/abetlen/llama-cpp-python"
Issues = "https://github.com/abetlen/llama-cpp-python/issues"
Documentation = "https://llama-cpp-python.readthedocs.io/en/latest/"
Changelog = "https://llama-cpp-python.readthedocs.io/en/latest/changelog/"
# Limit pytest collection to the tests/ directory.
[tool.pytest.ini_options]
testpaths = "tests"