# docker-compose.yml
  1. version: '3.6'
  2. services:
  3. ollama:
  4. # Uncomment below for GPU support
  5. # deploy:
  6. # resources:
  7. # reservations:
  8. # devices:
  9. # - driver: nvidia
  10. # count: 1
  11. # capabilities:
  12. # - gpu
  13. volumes:
  14. - ollama:/root/.ollama
  15. # Uncomment below to expose Ollama API outside the container stack
  16. # ports:
  17. # - 11434:11434
  18. container_name: ollama
  19. pull_policy: always
  20. tty: true
  21. restart: unless-stopped
  22. image: ollama/ollama:latest
  23. ollama-webui:
  24. build:
  25. context: .
  26. args:
  27. OLLAMA_API_BASE_URL: '/ollama/api'
  28. dockerfile: Dockerfile
  29. image: ollama-webui:latest
  30. container_name: ollama-webui
  31. depends_on:
  32. - ollama
  33. ports:
  34. - 3000:8080
  35. environment:
  36. - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
  37. extra_hosts:
  38. - host.docker.internal:host-gateway
  39. restart: unless-stopped
  40. volumes:
  41. ollama: {}