config.go 9.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339
  1. package envconfig
  2. import (
  3. "errors"
  4. "fmt"
  5. "log/slog"
  6. "net"
  7. "os"
  8. "path/filepath"
  9. "runtime"
  10. "strconv"
  11. "strings"
  12. )
  13. type OllamaHost struct {
  14. Scheme string
  15. Host string
  16. Port string
  17. }
  18. func (o OllamaHost) String() string {
  19. return fmt.Sprintf("%s://%s:%s", o.Scheme, o.Host, o.Port)
  20. }
// ErrInvalidHostPort is returned by getOllamaHost when OLLAMA_HOST
// specifies a port that is not numeric or is outside 0-65535.
var ErrInvalidHostPort = errors.New("invalid port specified in OLLAMA_HOST")

// Package-level configuration state, populated from the environment by
// LoadConfig (called from init).
var (
	// Set via OLLAMA_ORIGINS in the environment
	AllowOrigins []string
	// Set via OLLAMA_DEBUG in the environment
	Debug bool
	// Experimental flash attention
	FlashAttention bool
	// Set via OLLAMA_HOST in the environment
	Host *OllamaHost
	// Set via OLLAMA_KEEP_ALIVE in the environment
	KeepAlive string
	// Set via OLLAMA_LLM_LIBRARY in the environment
	LLMLibrary string
	// Set via OLLAMA_MAX_LOADED_MODELS in the environment
	MaxRunners int
	// Set via OLLAMA_MAX_QUEUE in the environment
	MaxQueuedRequests int
	// Set via OLLAMA_MODELS in the environment
	ModelsDir string
	// Set via OLLAMA_MAX_VRAM in the environment
	MaxVRAM uint64
	// Set via OLLAMA_NOHISTORY in the environment
	NoHistory bool
	// Set via OLLAMA_NOPRUNE in the environment
	NoPrune bool
	// Set via OLLAMA_NUM_PARALLEL in the environment
	NumParallel int
	// Set via OLLAMA_RUNNERS_DIR in the environment
	RunnersDir string
	// Set via OLLAMA_SCHED_SPREAD in the environment
	SchedSpread bool
	// Set via OLLAMA_TMPDIR in the environment
	TmpDir string
	// Set via CUDA_VISIBLE_DEVICES in the environment
	CudaVisibleDevices string
	// Set via HIP_VISIBLE_DEVICES in the environment
	HipVisibleDevices string
	// Set via ROCR_VISIBLE_DEVICES in the environment
	RocrVisibleDevices string
	// Set via GPU_DEVICE_ORDINAL in the environment
	GpuDeviceOrdinal string
	// Set via HSA_OVERRIDE_GFX_VERSION in the environment
	HsaOverrideGfxVersion string
)
// EnvVar describes one configuration environment variable for display
// purposes (see AsMap and Values).
type EnvVar struct {
	Name        string // environment variable name, e.g. "OLLAMA_HOST"
	Value       any    // current value of the corresponding package-level setting
	Description string // human-readable help text
}
  71. func AsMap() map[string]EnvVar {
  72. ret := map[string]EnvVar{
  73. "OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug, "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
  74. "OLLAMA_FLASH_ATTENTION": {"OLLAMA_FLASH_ATTENTION", FlashAttention, "Enabled flash attention"},
  75. "OLLAMA_HOST": {"OLLAMA_HOST", Host, "IP Address for the ollama server (default 127.0.0.1:11434)"},
  76. "OLLAMA_KEEP_ALIVE": {"OLLAMA_KEEP_ALIVE", KeepAlive, "The duration that models stay loaded in memory (default \"5m\")"},
  77. "OLLAMA_LLM_LIBRARY": {"OLLAMA_LLM_LIBRARY", LLMLibrary, "Set LLM library to bypass autodetection"},
  78. "OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners, "Maximum number of loaded models (default 1)"},
  79. "OLLAMA_MAX_QUEUE": {"OLLAMA_MAX_QUEUE", MaxQueuedRequests, "Maximum number of queued requests"},
  80. "OLLAMA_MAX_VRAM": {"OLLAMA_MAX_VRAM", MaxVRAM, "Maximum VRAM"},
  81. "OLLAMA_MODELS": {"OLLAMA_MODELS", ModelsDir, "The path to the models directory"},
  82. "OLLAMA_NOHISTORY": {"OLLAMA_NOHISTORY", NoHistory, "Do not preserve readline history"},
  83. "OLLAMA_NOPRUNE": {"OLLAMA_NOPRUNE", NoPrune, "Do not prune model blobs on startup"},
  84. "OLLAMA_NUM_PARALLEL": {"OLLAMA_NUM_PARALLEL", NumParallel, "Maximum number of parallel requests (default 1)"},
  85. "OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", AllowOrigins, "A comma separated list of allowed origins"},
  86. "OLLAMA_RUNNERS_DIR": {"OLLAMA_RUNNERS_DIR", RunnersDir, "Location for runners"},
  87. "OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread, "Always schedule model across all GPUs"},
  88. "OLLAMA_TMPDIR": {"OLLAMA_TMPDIR", TmpDir, "Location for temporary files"},
  89. }
  90. if runtime.GOOS != "darwin" {
  91. ret["CUDA_VISIBLE_DEVICES"] = EnvVar{"CUDA_VISIBLE_DEVICES", CudaVisibleDevices, "Set which NVIDIA devices are visible"}
  92. ret["HIP_VISIBLE_DEVICES"] = EnvVar{"HIP_VISIBLE_DEVICES", HipVisibleDevices, "Set which AMD devices are visible"}
  93. ret["ROCR_VISIBLE_DEVICES"] = EnvVar{"ROCR_VISIBLE_DEVICES", RocrVisibleDevices, "Set which AMD devices are visible"}
  94. ret["GPU_DEVICE_ORDINAL"] = EnvVar{"GPU_DEVICE_ORDINAL", GpuDeviceOrdinal, "Set which AMD devices are visible"}
  95. ret["HSA_OVERRIDE_GFX_VERSION"] = EnvVar{"HSA_OVERRIDE_GFX_VERSION", HsaOverrideGfxVersion, "Override the gfx used for all detected AMD GPUs"}
  96. }
  97. return ret
  98. }
  99. func Values() map[string]string {
  100. vals := make(map[string]string)
  101. for k, v := range AsMap() {
  102. vals[k] = fmt.Sprintf("%v", v.Value)
  103. }
  104. return vals
  105. }
// defaultAllowOrigins lists hosts that are always granted CORS access,
// in addition to anything set via OLLAMA_ORIGINS. LoadConfig expands
// each entry with http/https schemes and wildcard ports.
var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}
  111. // Clean quotes and spaces from the value
  112. func clean(key string) string {
  113. return strings.Trim(os.Getenv(key), "\"' ")
  114. }
func init() {
	// Defaults that apply when the corresponding environment variable is
	// unset or invalid; LoadConfig only overrides values explicitly set.
	NumParallel = 1
	MaxRunners = 1
	MaxQueuedRequests = 512

	// Read the environment immediately so the package is usable on import.
	LoadConfig()
}
  122. func LoadConfig() {
  123. if debug := clean("OLLAMA_DEBUG"); debug != "" {
  124. d, err := strconv.ParseBool(debug)
  125. if err == nil {
  126. Debug = d
  127. } else {
  128. Debug = true
  129. }
  130. }
  131. if fa := clean("OLLAMA_FLASH_ATTENTION"); fa != "" {
  132. d, err := strconv.ParseBool(fa)
  133. if err == nil {
  134. FlashAttention = d
  135. }
  136. }
  137. RunnersDir = clean("OLLAMA_RUNNERS_DIR")
  138. if runtime.GOOS == "windows" && RunnersDir == "" {
  139. // On Windows we do not carry the payloads inside the main executable
  140. appExe, err := os.Executable()
  141. if err != nil {
  142. slog.Error("failed to lookup executable path", "error", err)
  143. }
  144. cwd, err := os.Getwd()
  145. if err != nil {
  146. slog.Error("failed to lookup working directory", "error", err)
  147. }
  148. var paths []string
  149. for _, root := range []string{filepath.Dir(appExe), cwd} {
  150. paths = append(paths,
  151. root,
  152. filepath.Join(root, "windows-"+runtime.GOARCH),
  153. filepath.Join(root, "dist", "windows-"+runtime.GOARCH),
  154. )
  155. }
  156. // Try a few variations to improve developer experience when building from source in the local tree
  157. for _, p := range paths {
  158. candidate := filepath.Join(p, "ollama_runners")
  159. _, err := os.Stat(candidate)
  160. if err == nil {
  161. RunnersDir = candidate
  162. break
  163. }
  164. }
  165. if RunnersDir == "" {
  166. slog.Error("unable to locate llm runner directory. Set OLLAMA_RUNNERS_DIR to the location of 'ollama_runners'")
  167. }
  168. }
  169. TmpDir = clean("OLLAMA_TMPDIR")
  170. userLimit := clean("OLLAMA_MAX_VRAM")
  171. if userLimit != "" {
  172. avail, err := strconv.ParseUint(userLimit, 10, 64)
  173. if err != nil {
  174. slog.Error("invalid setting, ignoring", "OLLAMA_MAX_VRAM", userLimit, "error", err)
  175. } else {
  176. MaxVRAM = avail
  177. }
  178. }
  179. LLMLibrary = clean("OLLAMA_LLM_LIBRARY")
  180. if onp := clean("OLLAMA_NUM_PARALLEL"); onp != "" {
  181. val, err := strconv.Atoi(onp)
  182. if err != nil || val <= 0 {
  183. slog.Error("invalid setting must be greater than zero", "OLLAMA_NUM_PARALLEL", onp, "error", err)
  184. } else {
  185. NumParallel = val
  186. }
  187. }
  188. if nohistory := clean("OLLAMA_NOHISTORY"); nohistory != "" {
  189. NoHistory = true
  190. }
  191. if spread := clean("OLLAMA_SCHED_SPREAD"); spread != "" {
  192. s, err := strconv.ParseBool(spread)
  193. if err == nil {
  194. SchedSpread = s
  195. } else {
  196. SchedSpread = true
  197. }
  198. }
  199. if noprune := clean("OLLAMA_NOPRUNE"); noprune != "" {
  200. NoPrune = true
  201. }
  202. if origins := clean("OLLAMA_ORIGINS"); origins != "" {
  203. AllowOrigins = strings.Split(origins, ",")
  204. }
  205. for _, allowOrigin := range defaultAllowOrigins {
  206. AllowOrigins = append(AllowOrigins,
  207. fmt.Sprintf("http://%s", allowOrigin),
  208. fmt.Sprintf("https://%s", allowOrigin),
  209. fmt.Sprintf("http://%s", net.JoinHostPort(allowOrigin, "*")),
  210. fmt.Sprintf("https://%s", net.JoinHostPort(allowOrigin, "*")),
  211. )
  212. }
  213. AllowOrigins = append(AllowOrigins,
  214. "app://*",
  215. "file://*",
  216. "tauri://*",
  217. )
  218. maxRunners := clean("OLLAMA_MAX_LOADED_MODELS")
  219. if maxRunners != "" {
  220. m, err := strconv.Atoi(maxRunners)
  221. if err != nil {
  222. slog.Error("invalid setting", "OLLAMA_MAX_LOADED_MODELS", maxRunners, "error", err)
  223. } else {
  224. MaxRunners = m
  225. }
  226. }
  227. if onp := os.Getenv("OLLAMA_MAX_QUEUE"); onp != "" {
  228. p, err := strconv.Atoi(onp)
  229. if err != nil || p <= 0 {
  230. slog.Error("invalid setting", "OLLAMA_MAX_QUEUE", onp, "error", err)
  231. } else {
  232. MaxQueuedRequests = p
  233. }
  234. }
  235. KeepAlive = clean("OLLAMA_KEEP_ALIVE")
  236. var err error
  237. ModelsDir, err = getModelsDir()
  238. if err != nil {
  239. slog.Error("invalid setting", "OLLAMA_MODELS", ModelsDir, "error", err)
  240. }
  241. Host, err = getOllamaHost()
  242. if err != nil {
  243. slog.Error("invalid setting", "OLLAMA_HOST", Host, "error", err, "using default port", Host.Port)
  244. }
  245. CudaVisibleDevices = clean("CUDA_VISIBLE_DEVICES")
  246. HipVisibleDevices = clean("HIP_VISIBLE_DEVICES")
  247. RocrVisibleDevices = clean("ROCR_VISIBLE_DEVICES")
  248. GpuDeviceOrdinal = clean("GPU_DEVICE_ORDINAL")
  249. HsaOverrideGfxVersion = clean("HSA_OVERRIDE_GFX_VERSION")
  250. }
  251. func getModelsDir() (string, error) {
  252. if models, exists := os.LookupEnv("OLLAMA_MODELS"); exists {
  253. return models, nil
  254. }
  255. home, err := os.UserHomeDir()
  256. if err != nil {
  257. return "", err
  258. }
  259. return filepath.Join(home, ".ollama", "models"), nil
  260. }
  261. func getOllamaHost() (*OllamaHost, error) {
  262. defaultPort := "11434"
  263. hostVar := os.Getenv("OLLAMA_HOST")
  264. hostVar = strings.TrimSpace(strings.Trim(strings.TrimSpace(hostVar), "\"'"))
  265. scheme, hostport, ok := strings.Cut(hostVar, "://")
  266. switch {
  267. case !ok:
  268. scheme, hostport = "http", hostVar
  269. case scheme == "http":
  270. defaultPort = "80"
  271. case scheme == "https":
  272. defaultPort = "443"
  273. }
  274. // trim trailing slashes
  275. hostport = strings.TrimRight(hostport, "/")
  276. host, port, err := net.SplitHostPort(hostport)
  277. if err != nil {
  278. host, port = "127.0.0.1", defaultPort
  279. if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
  280. host = ip.String()
  281. } else if hostport != "" {
  282. host = hostport
  283. }
  284. }
  285. if portNum, err := strconv.ParseInt(port, 10, 32); err != nil || portNum > 65535 || portNum < 0 {
  286. return &OllamaHost{
  287. Scheme: scheme,
  288. Host: host,
  289. Port: defaultPort,
  290. }, ErrInvalidHostPort
  291. }
  292. return &OllamaHost{
  293. Scheme: scheme,
  294. Host: host,
  295. Port: port,
  296. }, nil
  297. }