ollama/pyproject.toml
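
# Poetry-based packaging configuration for the ollama Python package.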
[tool.poetry]
name = "ollama"
version = "0.0.2"
description = "Run AI models locally"
authors = ["ollama team"]
readme = "README.md"
packages = [{include = "ollama"}]
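# Expose an `ollama` console command that runs the main() function in the
# ollama.cmd.cli module.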
scripts = {ollama = "ollama.cmd.cli:main"}

[tool.poetry.dependencies]
python = "^3.8"
llama-cpp-python = "^0.1.66"
jinja2 = "^3.1.2"
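# aiohttp and aiohttp-cors are optional: they are installed only when the
# `server` extra defined below is requested.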
aiohttp = {version = "^3.8.4", optional = true}
aiohttp-cors = {version = "^0.7.0", optional = true}
requests = "^2.31.0"
tqdm = "^4.65.0"
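
# Opt-in server dependencies; install with e.g. `pip install 'ollama[server]'`
# or `poetry install --extras server`.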
[tool.poetry.extras]
server = ["aiohttp", "aiohttp-cors"]
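
# Standard PEP 517 build configuration using Poetry's build backend.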
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"