config.go 10.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346
  1. package envconfig
  2. import (
  3. "errors"
  4. "fmt"
  5. "log/slog"
  6. "net"
  7. "os"
  8. "path/filepath"
  9. "runtime"
  10. "strconv"
  11. "strings"
  12. )
  13. type OllamaHost struct {
  14. Scheme string
  15. Host string
  16. Port string
  17. }
  18. func (o OllamaHost) String() string {
  19. return fmt.Sprintf("%s://%s:%s", o.Scheme, o.Host, o.Port)
  20. }
// ErrInvalidHostPort is returned by getOllamaHost when OLLAMA_HOST carries a
// port that is not a decimal number in [0, 65535].
var ErrInvalidHostPort = errors.New("invalid port specified in OLLAMA_HOST")

// Package-level configuration state, populated from the process environment
// by LoadConfig (invoked once from init).
var (
	// Set via OLLAMA_ORIGINS in the environment
	AllowOrigins []string
	// Set via OLLAMA_DEBUG in the environment
	Debug bool
	// Experimental flash attention
	FlashAttention bool
	// Set via OLLAMA_HOST in the environment
	Host *OllamaHost
	// Set via OLLAMA_KEEP_ALIVE in the environment
	KeepAlive string
	// Set via OLLAMA_LLM_LIBRARY in the environment
	LLMLibrary string
	// Set via OLLAMA_MAX_LOADED_MODELS in the environment (0 = autoselect)
	MaxRunners int
	// Set via OLLAMA_MAX_QUEUE in the environment
	MaxQueuedRequests int
	// Set via OLLAMA_MODELS in the environment
	ModelsDir string
	// Set via OLLAMA_MAX_VRAM in the environment
	MaxVRAM uint64
	// Set via OLLAMA_NOHISTORY in the environment
	NoHistory bool
	// Set via OLLAMA_NOPRUNE in the environment
	NoPrune bool
	// Set via OLLAMA_NUM_PARALLEL in the environment (0 = autoselect)
	NumParallel int
	// Set via OLLAMA_RUNNERS_DIR in the environment
	RunnersDir string
	// Set via OLLAMA_SCHED_SPREAD in the environment
	SchedSpread bool
	// Set via OLLAMA_TMPDIR in the environment
	TmpDir string
	// Set via OLLAMA_INTEL_GPU in the environment
	IntelGpu bool
	// Set via CUDA_VISIBLE_DEVICES in the environment
	CudaVisibleDevices string
	// Set via HIP_VISIBLE_DEVICES in the environment
	HipVisibleDevices string
	// Set via ROCR_VISIBLE_DEVICES in the environment
	RocrVisibleDevices string
	// Set via GPU_DEVICE_ORDINAL in the environment
	GpuDeviceOrdinal string
	// Set via HSA_OVERRIDE_GFX_VERSION in the environment
	HsaOverrideGfxVersion string
)
// EnvVar describes one supported configuration environment variable:
// its name, the currently loaded value, and a human-readable description.
type EnvVar struct {
	Name        string // environment variable name, e.g. "OLLAMA_HOST"
	Value       any    // current value of the corresponding package variable
	Description string // short help text shown to users
}
  73. func AsMap() map[string]EnvVar {
  74. ret := map[string]EnvVar{
  75. "OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug, "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
  76. "OLLAMA_FLASH_ATTENTION": {"OLLAMA_FLASH_ATTENTION", FlashAttention, "Enabled flash attention"},
  77. "OLLAMA_HOST": {"OLLAMA_HOST", Host, "IP Address for the ollama server (default 127.0.0.1:11434)"},
  78. "OLLAMA_KEEP_ALIVE": {"OLLAMA_KEEP_ALIVE", KeepAlive, "The duration that models stay loaded in memory (default \"5m\")"},
  79. "OLLAMA_LLM_LIBRARY": {"OLLAMA_LLM_LIBRARY", LLMLibrary, "Set LLM library to bypass autodetection"},
  80. "OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners, "Maximum number of loaded models per GPU (default auto)"},
  81. "OLLAMA_MAX_QUEUE": {"OLLAMA_MAX_QUEUE", MaxQueuedRequests, "Maximum number of queued requests"},
  82. "OLLAMA_MAX_VRAM": {"OLLAMA_MAX_VRAM", MaxVRAM, "Maximum VRAM"},
  83. "OLLAMA_MODELS": {"OLLAMA_MODELS", ModelsDir, "The path to the models directory"},
  84. "OLLAMA_NOHISTORY": {"OLLAMA_NOHISTORY", NoHistory, "Do not preserve readline history"},
  85. "OLLAMA_NOPRUNE": {"OLLAMA_NOPRUNE", NoPrune, "Do not prune model blobs on startup"},
  86. "OLLAMA_NUM_PARALLEL": {"OLLAMA_NUM_PARALLEL", NumParallel, "Maximum number of parallel requests (default auto)"},
  87. "OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", AllowOrigins, "A comma separated list of allowed origins"},
  88. "OLLAMA_RUNNERS_DIR": {"OLLAMA_RUNNERS_DIR", RunnersDir, "Location for runners"},
  89. "OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread, "Always schedule model across all GPUs"},
  90. "OLLAMA_TMPDIR": {"OLLAMA_TMPDIR", TmpDir, "Location for temporary files"},
  91. }
  92. if runtime.GOOS != "darwin" {
  93. ret["CUDA_VISIBLE_DEVICES"] = EnvVar{"CUDA_VISIBLE_DEVICES", CudaVisibleDevices, "Set which NVIDIA devices are visible"}
  94. ret["HIP_VISIBLE_DEVICES"] = EnvVar{"HIP_VISIBLE_DEVICES", HipVisibleDevices, "Set which AMD devices are visible"}
  95. ret["ROCR_VISIBLE_DEVICES"] = EnvVar{"ROCR_VISIBLE_DEVICES", RocrVisibleDevices, "Set which AMD devices are visible"}
  96. ret["GPU_DEVICE_ORDINAL"] = EnvVar{"GPU_DEVICE_ORDINAL", GpuDeviceOrdinal, "Set which AMD devices are visible"}
  97. ret["HSA_OVERRIDE_GFX_VERSION"] = EnvVar{"HSA_OVERRIDE_GFX_VERSION", HsaOverrideGfxVersion, "Override the gfx used for all detected AMD GPUs"}
  98. ret["OLLAMA_INTEL_GPU"] = EnvVar{"OLLAMA_INTEL_GPU", IntelGpu, "Enable experimental Intel GPU detection"}
  99. }
  100. return ret
  101. }
  102. func Values() map[string]string {
  103. vals := make(map[string]string)
  104. for k, v := range AsMap() {
  105. vals[k] = fmt.Sprintf("%v", v.Value)
  106. }
  107. return vals
  108. }
// defaultAllowOrigins are hosts always permitted as CORS origins; LoadConfig
// expands each into http/https variants, with and without a wildcard port.
var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}
  114. // Clean quotes and spaces from the value
  115. func clean(key string) string {
  116. return strings.Trim(os.Getenv(key), "\"' ")
  117. }
  118. func init() {
  119. // default values
  120. NumParallel = 0 // Autoselect
  121. MaxRunners = 0 // Autoselect
  122. MaxQueuedRequests = 512
  123. LoadConfig()
  124. }
// LoadConfig populates every package-level configuration variable from the
// process environment. It is called once from init; malformed values are
// logged and the previously set (default) value is kept.
func LoadConfig() {
	// Any non-empty, unparsable value (e.g. "verbose") still enables debug.
	if debug := clean("OLLAMA_DEBUG"); debug != "" {
		d, err := strconv.ParseBool(debug)
		if err == nil {
			Debug = d
		} else {
			Debug = true
		}
	}
	if fa := clean("OLLAMA_FLASH_ATTENTION"); fa != "" {
		d, err := strconv.ParseBool(fa)
		if err == nil {
			FlashAttention = d
		}
	}
	RunnersDir = clean("OLLAMA_RUNNERS_DIR")
	if runtime.GOOS == "windows" && RunnersDir == "" {
		// On Windows we do not carry the payloads inside the main executable
		// NOTE(review): on Executable/Getwd failure we log and continue with
		// an empty path, so filepath.Dir("") ("." ) still gets probed below.
		appExe, err := os.Executable()
		if err != nil {
			slog.Error("failed to lookup executable path", "error", err)
		}
		cwd, err := os.Getwd()
		if err != nil {
			slog.Error("failed to lookup working directory", "error", err)
		}
		var paths []string
		// Candidate roots: next to the executable and under the working directory.
		for _, root := range []string{filepath.Dir(appExe), cwd} {
			paths = append(paths,
				root,
				filepath.Join(root, "windows-"+runtime.GOARCH),
				filepath.Join(root, "dist", "windows-"+runtime.GOARCH),
			)
		}
		// Try a few variations to improve developer experience when building from source in the local tree
		for _, p := range paths {
			candidate := filepath.Join(p, "ollama_runners")
			_, err := os.Stat(candidate)
			if err == nil {
				RunnersDir = candidate
				break
			}
		}
		if RunnersDir == "" {
			slog.Error("unable to locate llm runner directory. Set OLLAMA_RUNNERS_DIR to the location of 'ollama_runners'")
		}
	}
	TmpDir = clean("OLLAMA_TMPDIR")
	userLimit := clean("OLLAMA_MAX_VRAM")
	if userLimit != "" {
		avail, err := strconv.ParseUint(userLimit, 10, 64)
		if err != nil {
			slog.Error("invalid setting, ignoring", "OLLAMA_MAX_VRAM", userLimit, "error", err)
		} else {
			MaxVRAM = avail
		}
	}
	LLMLibrary = clean("OLLAMA_LLM_LIBRARY")
	if onp := clean("OLLAMA_NUM_PARALLEL"); onp != "" {
		val, err := strconv.Atoi(onp)
		if err != nil {
			slog.Error("invalid setting, ignoring", "OLLAMA_NUM_PARALLEL", onp, "error", err)
		} else {
			NumParallel = val
		}
	}
	// NOTE(review): any non-empty value enables NOHISTORY/NOPRUNE, even
	// "0" or "false" — the value itself is never parsed.
	if nohistory := clean("OLLAMA_NOHISTORY"); nohistory != "" {
		NoHistory = true
	}
	if spread := clean("OLLAMA_SCHED_SPREAD"); spread != "" {
		s, err := strconv.ParseBool(spread)
		if err == nil {
			SchedSpread = s
		} else {
			SchedSpread = true
		}
	}
	if noprune := clean("OLLAMA_NOPRUNE"); noprune != "" {
		NoPrune = true
	}
	if origins := clean("OLLAMA_ORIGINS"); origins != "" {
		AllowOrigins = strings.Split(origins, ",")
	}
	// Always append the loopback defaults, with and without a wildcard port.
	for _, allowOrigin := range defaultAllowOrigins {
		AllowOrigins = append(AllowOrigins,
			fmt.Sprintf("http://%s", allowOrigin),
			fmt.Sprintf("https://%s", allowOrigin),
			fmt.Sprintf("http://%s", net.JoinHostPort(allowOrigin, "*")),
			fmt.Sprintf("https://%s", net.JoinHostPort(allowOrigin, "*")),
		)
	}
	// Desktop app schemes are always allowed.
	AllowOrigins = append(AllowOrigins,
		"app://*",
		"file://*",
		"tauri://*",
	)
	maxRunners := clean("OLLAMA_MAX_LOADED_MODELS")
	if maxRunners != "" {
		m, err := strconv.Atoi(maxRunners)
		if err != nil {
			slog.Error("invalid setting, ignoring", "OLLAMA_MAX_LOADED_MODELS", maxRunners, "error", err)
		} else {
			MaxRunners = m
		}
	}
	// NOTE(review): reads the raw env var (not clean), unlike every other
	// setting — a quoted value will fail to parse here; confirm intended.
	if onp := os.Getenv("OLLAMA_MAX_QUEUE"); onp != "" {
		p, err := strconv.Atoi(onp)
		if err != nil || p <= 0 {
			slog.Error("invalid setting, ignoring", "OLLAMA_MAX_QUEUE", onp, "error", err)
		} else {
			MaxQueuedRequests = p
		}
	}
	KeepAlive = clean("OLLAMA_KEEP_ALIVE")
	var err error
	ModelsDir, err = getModelsDir()
	if err != nil {
		slog.Error("invalid setting", "OLLAMA_MODELS", ModelsDir, "error", err)
	}
	// getOllamaHost always returns a usable *OllamaHost (defaults applied),
	// even when it also returns ErrInvalidHostPort.
	Host, err = getOllamaHost()
	if err != nil {
		slog.Error("invalid setting", "OLLAMA_HOST", Host, "error", err, "using default port", Host.Port)
	}
	if set, err := strconv.ParseBool(clean("OLLAMA_INTEL_GPU")); err == nil {
		IntelGpu = set
	}
	// GPU selection variables are passed through verbatim (after quote-stripping).
	CudaVisibleDevices = clean("CUDA_VISIBLE_DEVICES")
	HipVisibleDevices = clean("HIP_VISIBLE_DEVICES")
	RocrVisibleDevices = clean("ROCR_VISIBLE_DEVICES")
	GpuDeviceOrdinal = clean("GPU_DEVICE_ORDINAL")
	HsaOverrideGfxVersion = clean("HSA_OVERRIDE_GFX_VERSION")
}
  257. func getModelsDir() (string, error) {
  258. if models, exists := os.LookupEnv("OLLAMA_MODELS"); exists {
  259. return models, nil
  260. }
  261. home, err := os.UserHomeDir()
  262. if err != nil {
  263. return "", err
  264. }
  265. return filepath.Join(home, ".ollama", "models"), nil
  266. }
  267. func getOllamaHost() (*OllamaHost, error) {
  268. defaultPort := "11434"
  269. hostVar := os.Getenv("OLLAMA_HOST")
  270. hostVar = strings.TrimSpace(strings.Trim(strings.TrimSpace(hostVar), "\"'"))
  271. scheme, hostport, ok := strings.Cut(hostVar, "://")
  272. switch {
  273. case !ok:
  274. scheme, hostport = "http", hostVar
  275. case scheme == "http":
  276. defaultPort = "80"
  277. case scheme == "https":
  278. defaultPort = "443"
  279. }
  280. // trim trailing slashes
  281. hostport = strings.TrimRight(hostport, "/")
  282. host, port, err := net.SplitHostPort(hostport)
  283. if err != nil {
  284. host, port = "127.0.0.1", defaultPort
  285. if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
  286. host = ip.String()
  287. } else if hostport != "" {
  288. host = hostport
  289. }
  290. }
  291. if portNum, err := strconv.ParseInt(port, 10, 32); err != nil || portNum > 65535 || portNum < 0 {
  292. return &OllamaHost{
  293. Scheme: scheme,
  294. Host: host,
  295. Port: defaultPort,
  296. }, ErrInvalidHostPort
  297. }
  298. return &OllamaHost{
  299. Scheme: scheme,
  300. Host: host,
  301. Port: port,
  302. }, nil
  303. }