ollama/pyproject.toml

[tool.poetry]
name = "ollama"
version = "0.0.2"
description = "Run AI models locally"
authors = ["ollama team"]
readme = "README.md"
packages = [{include = "ollama"}]
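# Console-script entry point: installing the package adds an `ollama` command
# that calls the `main()` function of the `ollama.cmd.cli` module
# (i.e. ollama/cmd/cli.py, path inferred from the entry-point spec below).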
scripts = {ollama = "ollama.cmd.cli:main"}

[tool.poetry.dependencies]
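# Poetry caret constraints: "^3.8" means any Python >=3.8,<4.0; the same
# convention applies to the dependency versions below.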
python = "^3.8"
llama-cpp-python = "^0.1.66"
jinja2 = "^3.1.2"
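# aiohttp and aiohttp-cors are marked optional and are only installed when
# the "server" extra (declared under [tool.poetry.extras]) is requested.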
aiohttp = {version = "^3.8.4", optional = true}
aiohttp-cors = {version = "^0.7.0", optional = true}
requests = "^2.31.0"
tqdm = "^4.65.0"

[tool.poetry.extras]
server = ["aiohttp", "aiohttp-cors"]
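# Example usage (illustrative, not part of this file): install the optional
# server dependencies with
#   poetry install --extras server
# or, for a published release,
#   pip install "ollama[server]"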

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
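# With the poetry-core backend declared above, the package can be built with
# `poetry build` or installed from a checkout via `pip install .`.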