config.go

package envconfig

import (
	"errors"
	"fmt"
	"log/slog"
	"math"
	"net"
	"os"
	"path/filepath"
	"runtime"
	"strconv"
	"strings"
	"time"
)
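
// OllamaHost describes the scheme, host, and port parsed from OLLAMA_HOST.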
type OllamaHost struct {
	Scheme string
	Host   string
	Port   string
}

func (o OllamaHost) String() string {
	return fmt.Sprintf("%s://%s:%s", o.Scheme, o.Host, o.Port)
}
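
// ErrInvalidHostPort is returned when OLLAMA_HOST specifies a port that is not a valid number in the range 0-65535.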
var ErrInvalidHostPort = errors.New("invalid port specified in OLLAMA_HOST")

var (
	// Set via OLLAMA_ORIGINS in the environment
	AllowOrigins []string
	// Set via OLLAMA_DEBUG in the environment
	Debug bool
	// Experimental flash attention
	FlashAttention bool
	// Set via OLLAMA_HOST in the environment
	Host *OllamaHost
	// Set via OLLAMA_KEEP_ALIVE in the environment
	KeepAlive time.Duration
	// Set via OLLAMA_LLM_LIBRARY in the environment
	LLMLibrary string
	// Set via OLLAMA_MAX_LOADED_MODELS in the environment
	MaxRunners int
	// Set via OLLAMA_MAX_QUEUE in the environment
	MaxQueuedRequests int
	// Set via OLLAMA_MODELS in the environment
	ModelsDir string
	// Set via OLLAMA_NEW_RUNNERS in the environment
	NewRunners bool
	// Set via OLLAMA_NOHISTORY in the environment
	NoHistory bool
	// Set via OLLAMA_NOPRUNE in the environment
	NoPrune bool
	// Set via OLLAMA_NUM_PARALLEL in the environment
	NumParallel int
	// Set via OLLAMA_RUNNERS_DIR in the environment
	RunnersDir string
	// Set via OLLAMA_SCHED_SPREAD in the environment
	SchedSpread bool
	// Set via OLLAMA_TMPDIR in the environment
	TmpDir string
	// Set via OLLAMA_INTEL_GPU in the environment
	IntelGpu bool
	// Set via CUDA_VISIBLE_DEVICES in the environment
	CudaVisibleDevices string
	// Set via HIP_VISIBLE_DEVICES in the environment
	HipVisibleDevices string
	// Set via ROCR_VISIBLE_DEVICES in the environment
	RocrVisibleDevices string
	// Set via GPU_DEVICE_ORDINAL in the environment
	GpuDeviceOrdinal string
	// Set via HSA_OVERRIDE_GFX_VERSION in the environment
	HsaOverrideGfxVersion string
)
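
// EnvVar pairs an environment variable name with its current value and a short description.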
type EnvVar struct {
	Name        string
	Value       any
	Description string
}
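
// AsMap returns the supported configuration variables keyed by name. GPU
// selection variables are only included on platforms other than darwin.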
func AsMap() map[string]EnvVar {
	ret := map[string]EnvVar{
		"OLLAMA_DEBUG": {"OLLAMA_DEBUG", Debug, "Show additional debug information (e.g. OLLAMA_DEBUG=1)"},
		"OLLAMA_FLASH_ATTENTION": {"OLLAMA_FLASH_ATTENTION", FlashAttention, "Enable flash attention"},
		"OLLAMA_HOST": {"OLLAMA_HOST", Host, "IP address for the ollama server (default 127.0.0.1:11434)"},
		"OLLAMA_KEEP_ALIVE": {"OLLAMA_KEEP_ALIVE", KeepAlive, "The duration that models stay loaded in memory (default \"5m\")"},
		"OLLAMA_LLM_LIBRARY": {"OLLAMA_LLM_LIBRARY", LLMLibrary, "Set LLM library to bypass autodetection"},
		"OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners, "Maximum number of loaded models per GPU"},
		"OLLAMA_MAX_QUEUE": {"OLLAMA_MAX_QUEUE", MaxQueuedRequests, "Maximum number of queued requests"},
		"OLLAMA_MODELS": {"OLLAMA_MODELS", ModelsDir, "The path to the models directory"},
		"OLLAMA_NEW_RUNNERS": {"OLLAMA_NEW_RUNNERS", NewRunners, "Enable new experimental runners"},
		"OLLAMA_NOHISTORY": {"OLLAMA_NOHISTORY", NoHistory, "Do not preserve readline history"},
		"OLLAMA_NOPRUNE": {"OLLAMA_NOPRUNE", NoPrune, "Do not prune model blobs on startup"},
		"OLLAMA_NUM_PARALLEL": {"OLLAMA_NUM_PARALLEL", NumParallel, "Maximum number of parallel requests"},
		"OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", AllowOrigins, "A comma separated list of allowed origins"},
		"OLLAMA_RUNNERS_DIR": {"OLLAMA_RUNNERS_DIR", RunnersDir, "Location for runners"},
		"OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread, "Always schedule model across all GPUs"},
		"OLLAMA_TMPDIR": {"OLLAMA_TMPDIR", TmpDir, "Location for temporary files"},
	}
	if runtime.GOOS != "darwin" {
		ret["CUDA_VISIBLE_DEVICES"] = EnvVar{"CUDA_VISIBLE_DEVICES", CudaVisibleDevices, "Set which NVIDIA devices are visible"}
		ret["HIP_VISIBLE_DEVICES"] = EnvVar{"HIP_VISIBLE_DEVICES", HipVisibleDevices, "Set which AMD devices are visible"}
		ret["ROCR_VISIBLE_DEVICES"] = EnvVar{"ROCR_VISIBLE_DEVICES", RocrVisibleDevices, "Set which AMD devices are visible"}
		ret["GPU_DEVICE_ORDINAL"] = EnvVar{"GPU_DEVICE_ORDINAL", GpuDeviceOrdinal, "Set which AMD devices are visible"}
		ret["HSA_OVERRIDE_GFX_VERSION"] = EnvVar{"HSA_OVERRIDE_GFX_VERSION", HsaOverrideGfxVersion, "Override the gfx used for all detected AMD GPUs"}
		ret["OLLAMA_INTEL_GPU"] = EnvVar{"OLLAMA_INTEL_GPU", IntelGpu, "Enable experimental Intel GPU detection"}
	}
	return ret
}
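
// Values returns the current configuration as strings, keyed by environment variable name.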
func Values() map[string]string {
	vals := make(map[string]string)
	for k, v := range AsMap() {
		vals[k] = fmt.Sprintf("%v", v.Value)
	}
	return vals
}

var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}

// Clean quotes and spaces from the value
func clean(key string) string {
	return strings.Trim(os.Getenv(key), "\"' ")
}
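
// init seeds the defaults (autoselected parallelism and runner count, a queue
// of 512 requests, and a 5 minute keep-alive) before loading the environment.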
func init() {
	// default values
	NumParallel = 0 // Autoselect
	MaxRunners = 0  // Autoselect
	MaxQueuedRequests = 512
	KeepAlive = 5 * time.Minute

	LoadConfig()
}
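
// LoadConfig reads the supported environment variables and populates the
// package-level configuration values.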
func LoadConfig() {
	if debug := clean("OLLAMA_DEBUG"); debug != "" {
		d, err := strconv.ParseBool(debug)
		if err == nil {
			Debug = d
		} else {
			Debug = true
		}
	}

	if fa := clean("OLLAMA_FLASH_ATTENTION"); fa != "" {
		d, err := strconv.ParseBool(fa)
		if err == nil {
			FlashAttention = d
		}
	}

	RunnersDir = clean("OLLAMA_RUNNERS_DIR")
	if runtime.GOOS == "windows" && RunnersDir == "" {
		// On Windows we do not carry the payloads inside the main executable
		appExe, err := os.Executable()
		if err != nil {
			slog.Error("failed to lookup executable path", "error", err)
		}

		cwd, err := os.Getwd()
		if err != nil {
			slog.Error("failed to lookup working directory", "error", err)
		}

		var paths []string
		for _, root := range []string{filepath.Dir(appExe), cwd} {
			paths = append(paths,
				root,
				filepath.Join(root, runtime.GOOS+"-"+runtime.GOARCH),
				filepath.Join(root, "dist", runtime.GOOS+"-"+runtime.GOARCH),
			)
		}

		// Try a few variations to improve developer experience when building from source in the local tree
		for _, p := range paths {
			candidate := filepath.Join(p, "ollama_runners")
			_, err := os.Stat(candidate)
			if err == nil {
				RunnersDir = candidate
				break
			}
		}
		if RunnersDir == "" {
			slog.Error("unable to locate llm runner directory. Set OLLAMA_RUNNERS_DIR to the location of 'ollama_runners'")
		}
	}

	TmpDir = clean("OLLAMA_TMPDIR")
	LLMLibrary = clean("OLLAMA_LLM_LIBRARY")

	if onp := clean("OLLAMA_NUM_PARALLEL"); onp != "" {
		val, err := strconv.Atoi(onp)
		if err != nil {
			slog.Error("invalid setting, ignoring", "OLLAMA_NUM_PARALLEL", onp, "error", err)
		} else {
			NumParallel = val
		}
	}

	if nohistory := clean("OLLAMA_NOHISTORY"); nohistory != "" {
		NoHistory = true
	}

	if spread := clean("OLLAMA_SCHED_SPREAD"); spread != "" {
		s, err := strconv.ParseBool(spread)
		if err == nil {
			SchedSpread = s
		} else {
			SchedSpread = true
		}
	}

	if noprune := clean("OLLAMA_NOPRUNE"); noprune != "" {
		NoPrune = true
	}

	if origins := clean("OLLAMA_ORIGINS"); origins != "" {
		AllowOrigins = strings.Split(origins, ",")
	}
	for _, allowOrigin := range defaultAllowOrigins {
		AllowOrigins = append(AllowOrigins,
			fmt.Sprintf("http://%s", allowOrigin),
			fmt.Sprintf("https://%s", allowOrigin),
			fmt.Sprintf("http://%s", net.JoinHostPort(allowOrigin, "*")),
			fmt.Sprintf("https://%s", net.JoinHostPort(allowOrigin, "*")),
		)
	}
	AllowOrigins = append(AllowOrigins,
		"app://*",
		"file://*",
		"tauri://*",
	)

	maxRunners := clean("OLLAMA_MAX_LOADED_MODELS")
	if maxRunners != "" {
		m, err := strconv.Atoi(maxRunners)
		if err != nil {
			slog.Error("invalid setting, ignoring", "OLLAMA_MAX_LOADED_MODELS", maxRunners, "error", err)
		} else {
			MaxRunners = m
		}
	}

	if onp := os.Getenv("OLLAMA_MAX_QUEUE"); onp != "" {
		p, err := strconv.Atoi(onp)
		if err != nil || p <= 0 {
			slog.Error("invalid setting, ignoring", "OLLAMA_MAX_QUEUE", onp, "error", err)
		} else {
			MaxQueuedRequests = p
		}
	}

	ka := clean("OLLAMA_KEEP_ALIVE")
	if ka != "" {
		loadKeepAlive(ka)
	}

	var err error
	ModelsDir, err = getModelsDir()
	if err != nil {
		slog.Error("invalid setting", "OLLAMA_MODELS", ModelsDir, "error", err)
	}

	Host, err = getOllamaHost()
	if err != nil {
		slog.Error("invalid setting", "OLLAMA_HOST", Host, "error", err, "using default port", Host.Port)
	}

	if set, err := strconv.ParseBool(clean("OLLAMA_INTEL_GPU")); err == nil {
		IntelGpu = set
	}

	CudaVisibleDevices = clean("CUDA_VISIBLE_DEVICES")
	HipVisibleDevices = clean("HIP_VISIBLE_DEVICES")
	RocrVisibleDevices = clean("ROCR_VISIBLE_DEVICES")
	GpuDeviceOrdinal = clean("GPU_DEVICE_ORDINAL")
	HsaOverrideGfxVersion = clean("HSA_OVERRIDE_GFX_VERSION")

	if nr := clean("OLLAMA_NEW_RUNNERS"); nr != "" {
		d, err := strconv.ParseBool(nr)
		if err == nil {
			NewRunners = d
		}
	}
}
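
// getModelsDir returns OLLAMA_MODELS if it is set, otherwise $HOME/.ollama/models.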
func getModelsDir() (string, error) {
	if models, exists := os.LookupEnv("OLLAMA_MODELS"); exists {
		return models, nil
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, ".ollama", "models"), nil
}
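
// getOllamaHost parses OLLAMA_HOST into scheme, host, and port, defaulting to
// http://127.0.0.1:11434. An unparseable or out-of-range port yields the
// scheme's default port along with ErrInvalidHostPort.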
func getOllamaHost() (*OllamaHost, error) {
	defaultPort := "11434"

	hostVar := os.Getenv("OLLAMA_HOST")
	hostVar = strings.TrimSpace(strings.Trim(strings.TrimSpace(hostVar), "\"'"))

	scheme, hostport, ok := strings.Cut(hostVar, "://")
	switch {
	case !ok:
		scheme, hostport = "http", hostVar
	case scheme == "http":
		defaultPort = "80"
	case scheme == "https":
		defaultPort = "443"
	}

	// trim trailing slashes
	hostport = strings.TrimRight(hostport, "/")

	host, port, err := net.SplitHostPort(hostport)
	if err != nil {
		host, port = "127.0.0.1", defaultPort
		if ip := net.ParseIP(strings.Trim(hostport, "[]")); ip != nil {
			host = ip.String()
		} else if hostport != "" {
			host = hostport
		}
	}

	if portNum, err := strconv.ParseInt(port, 10, 32); err != nil || portNum > 65535 || portNum < 0 {
		return &OllamaHost{
			Scheme: scheme,
			Host:   host,
			Port:   defaultPort,
		}, ErrInvalidHostPort
	}

	return &OllamaHost{
		Scheme: scheme,
		Host:   host,
		Port:   port,
	}, nil
}
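
// loadKeepAlive interprets OLLAMA_KEEP_ALIVE either as a number of seconds or
// as a Go duration string; negative values keep models loaded indefinitely.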
func loadKeepAlive(ka string) {
	v, err := strconv.Atoi(ka)
	if err != nil {
		d, err := time.ParseDuration(ka)
		if err == nil {
			if d < 0 {
				KeepAlive = time.Duration(math.MaxInt64)
			} else {
				KeepAlive = d
			}
		}
	} else {
		d := time.Duration(v) * time.Second
		if d < 0 {
			KeepAlive = time.Duration(math.MaxInt64)
		} else {
			KeepAlive = d
		}
	}
}