[tool.poetry]
name = "ollama"
version = "0.0.9"
description = "Run AI models locally"
authors = ["ollama team"]
readme = "README.md"
packages = [{include = "ollama"}]
scripts = {ollama = "ollama.cmd.cli:main"}

[tool.poetry.dependencies]
python = "^3.8"
aiohttp = "^3.8.4"
aiohttp-cors = "^0.7.0"
jinja2 = "^3.1.2"
requests = "^2.31.0"
tqdm = "^4.65.0"
validators = "^0.20.0"
yaspin = "^2.3.0"
llama-cpp-python = "^0.1.67"
ctransformers = "^0.2.10"
thefuzz = {version = "^0.19.0", extras = ["speedup"]}

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"