@@ -167,6 +167,8 @@ var (
 	MultiUserCache = Bool("OLLAMA_MULTIUSER_CACHE")
 	// Enable the new Ollama engine
 	NewEngine = Bool("OLLAMA_NEW_ENGINE")
+	// ContextLength sets the default context length
+	ContextLength = Uint("OLLAMA_CONTEXT_LENGTH", 2048)
 )

 func String(s string) func() string {
@@ -252,6 +254,7 @@ func AsMap() map[string]EnvVar {
 		"OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", AllowedOrigins(), "A comma separated list of allowed origins"},
 		"OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
 		"OLLAMA_MULTIUSER_CACHE": {"OLLAMA_MULTIUSER_CACHE", MultiUserCache(), "Optimize prompt caching for multi-user scenarios"},
+		"OLLAMA_CONTEXT_LENGTH": {"OLLAMA_CONTEXT_LENGTH", ContextLength(), "Context length to use unless otherwise specified (default: 2048)"},
 		"OLLAMA_NEW_ENGINE": {"OLLAMA_NEW_ENGINE", NewEngine(), "Enable the new Ollama engine"},

 		// Informational
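
For reference, the added Uint("OLLAMA_CONTEXT_LENGTH", 2048) call follows the same lazy-getter pattern as the Bool and String helpers already in envconfig: ContextLength holds a closure that re-reads the environment variable on each call and falls back to the default. The sketch below only illustrates that shape; the uintVar name and its internals are hypothetical and not the package's actual implementation.

package main

import (
	"fmt"
	"os"
	"strconv"
)

// uintVar mirrors the getter shape implied by Uint("OLLAMA_CONTEXT_LENGTH", 2048):
// it returns a closure that reads the variable from the environment on every call
// and falls back to the default when it is unset or not a valid unsigned integer.
// (Hypothetical helper for illustration; not the envconfig package's code.)
func uintVar(key string, defaultValue uint) func() uint {
	return func() uint {
		if s := os.Getenv(key); s != "" {
			if n, err := strconv.ParseUint(s, 10, 64); err == nil {
				return uint(n)
			}
		}
		return defaultValue
	}
}

func main() {
	contextLength := uintVar("OLLAMA_CONTEXT_LENGTH", 2048)
	fmt.Println(contextLength()) // prints 2048 while the variable is unset

	os.Setenv("OLLAMA_CONTEXT_LENGTH", "8192")
	fmt.Println(contextLength()) // prints 8192 after the override
}

With the diff applied, code that needs the default can presumably call envconfig.ContextLength(), as AsMap already does here, and users override the 2048 default by setting OLLAMA_CONTEXT_LENGTH in the server's environment.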