ollama/pyproject.toml
[tool.poetry]
name = "ollama"
version = "0.0.9"
description = "Run AI models locally"
authors = ["ollama team"]
readme = "README.md"
packages = [{include = "ollama"}]
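# Console script: installing the package creates an `ollama` command that invokes ollama.cmd.cli:main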
scripts = {ollama = "ollama.cmd.cli:main"}

[tool.poetry.dependencies]
python = "^3.8"
aiohttp = "^3.8.4"
aiohttp-cors = "^0.7.0"
jinja2 = "^3.1.2"
requests = "^2.31.0"
tqdm = "^4.65.0"
validators = "^0.20.0"
yaspin = "^2.3.0"
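# Local inference backends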
llama-cpp-python = "^0.1.67"
ctransformers = "^0.2.10"
fuzzywuzzy = {extras = ["speedup"], version = "^0.18.0"}

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
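
# Usage sketch (illustrative, not part of the original file): with Poetry
# installed, the package can be set up and the `ollama` console script run
# roughly as follows; the exact CLI flags depend on ollama.cmd.cli:main.
#
#   poetry install
#   poetry run ollama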