# Poetry project metadata for the `ollama` Python package.
[tool.poetry]
name = "ollama"
version = "0.0.9"
description = "Run ai models locally"
authors = ["ollama team"]
readme = "README.md"
packages = [{ include = "ollama" }]
# Console entry point: installs an `ollama` executable wired to ollama.cmd.cli:main.
scripts = { ollama = "ollama.cmd.cli:main" }

[tool.poetry.dependencies]
python = "^3.8"
aiohttp = "^3.8.4"
aiohttp-cors = "^0.7.0"
jinja2 = "^3.1.2"
requests = "^2.31.0"
tqdm = "^4.65.0"
validators = "^0.20.0"
yaspin = "^2.3.0"
llama-cpp-python = "^0.1.67"
ctransformers = "^0.2.10"
# `speedup` extra pulls in python-Levenshtein for faster fuzzy matching.
thefuzz = { version = "^0.19.0", extras = ["speedup"] }

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"