
add OLLAMA_MODELS to envconfig (#5029)

Patrick Devine, 10 months ago
Parent commit 94618b2365
8 changed files with 47 additions and 11 deletions
  1. envconfig/config.go (+21, -3)
  2. server/manifest_test.go (+2, -0)
  3. server/modelpath.go (+3, -8)
  4. server/modelpath_test.go (+3, -0)
  5. server/routes_create_test.go (+10, -0)
  6. server/routes_delete_test.go (+3, -0)
  7. server/routes_list_test.go (+2, -0)
  8. server/routes_test.go (+3, -0)

+ 21 - 3
envconfig/config.go

@@ -31,6 +31,8 @@ var (
 	Debug bool
 	// Experimental flash attention
 	FlashAttention bool
+	// Set via OLLAMA_HOST in the environment
+	Host *OllamaHost
 	// Set via OLLAMA_KEEP_ALIVE in the environment
 	KeepAlive string
 	// Set via OLLAMA_LLM_LIBRARY in the environment
@@ -39,6 +41,8 @@ var (
 	MaxRunners int
 	// Set via OLLAMA_MAX_QUEUE in the environment
 	MaxQueuedRequests int
+	// Set via OLLAMA_MODELS in the environment
+	ModelsDir string
 	// Set via OLLAMA_MAX_VRAM in the environment
 	MaxVRAM uint64
 	// Set via OLLAMA_NOHISTORY in the environment
@@ -47,8 +51,6 @@ var (
 	NoPrune bool
 	// Set via OLLAMA_NUM_PARALLEL in the environment
 	NumParallel int
-	// Set via OLLAMA_HOST in the environment
-	Host *OllamaHost
 	// Set via OLLAMA_RUNNERS_DIR in the environment
 	RunnersDir string
 	// Set via OLLAMA_TMPDIR in the environment
@@ -71,7 +73,7 @@ func AsMap() map[string]EnvVar {
 		"OLLAMA_MAX_LOADED_MODELS": {"OLLAMA_MAX_LOADED_MODELS", MaxRunners, "Maximum number of loaded models (default 1)"},
 		"OLLAMA_MAX_QUEUE":         {"OLLAMA_MAX_QUEUE", MaxQueuedRequests, "Maximum number of queued requests"},
 		"OLLAMA_MAX_VRAM":          {"OLLAMA_MAX_VRAM", MaxVRAM, "Maximum VRAM"},
-		"OLLAMA_MODELS":            {"OLLAMA_MODELS", "", "The path to the models directory"},
+		"OLLAMA_MODELS":            {"OLLAMA_MODELS", ModelsDir, "The path to the models directory"},
 		"OLLAMA_NOHISTORY":         {"OLLAMA_NOHISTORY", NoHistory, "Do not preserve readline history"},
 		"OLLAMA_NOPRUNE":           {"OLLAMA_NOPRUNE", NoPrune, "Do not prune model blobs on startup"},
 		"OLLAMA_NUM_PARALLEL":      {"OLLAMA_NUM_PARALLEL", NumParallel, "Maximum number of parallel requests (default 1)"},
@@ -233,12 +235,28 @@ func LoadConfig() {
 	KeepAlive = clean("OLLAMA_KEEP_ALIVE")
 
 	var err error
+	ModelsDir, err = getModelsDir()
+	if err != nil {
+		slog.Error("invalid setting", "OLLAMA_MODELS", ModelsDir, "error", err)
+	}
+
 	Host, err = getOllamaHost()
 	if err != nil {
 		slog.Error("invalid setting", "OLLAMA_HOST", Host, "error", err, "using default port", Host.Port)
 	}
 }
 
+func getModelsDir() (string, error) {
+	if models, exists := os.LookupEnv("OLLAMA_MODELS"); exists {
+		return models, nil
+	}
+	home, err := os.UserHomeDir()
+	if err != nil {
+		return "", err
+	}
+	return filepath.Join(home, ".ollama", "models"), nil
+}
+
 func getOllamaHost() (*OllamaHost, error) {
 	defaultPort := "11434"
 

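With this change the models directory is resolved once inside envconfig.LoadConfig() and exposed as envconfig.ModelsDir: OLLAMA_MODELS wins when set, otherwise the value falls back to ~/.ollama/models. A minimal sketch of how a caller might read it after loading the configuration (the main wrapper and the example path below are illustrative, not part of the commit):

package main

import (
	"fmt"
	"os"

	"github.com/ollama/ollama/envconfig"
)

func main() {
	// Optionally override the models directory before loading config
	// (hypothetical path, for illustration only).
	os.Setenv("OLLAMA_MODELS", "/data/ollama/models")

	// LoadConfig resolves OLLAMA_MODELS (or falls back to ~/.ollama/models)
	// and caches the result in envconfig.ModelsDir.
	envconfig.LoadConfig()

	fmt.Println("models directory:", envconfig.ModelsDir)
}
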
+ 2 - 0
server/manifest_test.go

@@ -7,6 +7,7 @@ import (
 	"slices"
 	"testing"
 
+	"github.com/ollama/ollama/envconfig"
 	"github.com/ollama/ollama/types/model"
 )
 
@@ -107,6 +108,7 @@ func TestManifests(t *testing.T) {
 		t.Run(n, func(t *testing.T) {
 			d := t.TempDir()
 			t.Setenv("OLLAMA_MODELS", d)
+			envconfig.LoadConfig()
 
 			for _, p := range wants.ps {
 				createManifest(t, d, p)

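Because the server now reads the cached envconfig.ModelsDir instead of querying the environment directly, every test that overrides OLLAMA_MODELS via t.Setenv must also call envconfig.LoadConfig() so the override is picked up; that is the pattern repeated across the test files below. A minimal sketch of the pattern, using a hypothetical test that is not part of the commit:

package server

import (
	"testing"

	"github.com/ollama/ollama/envconfig"
)

// TestModelsDirOverride is a hypothetical example of the setup pattern.
func TestModelsDirOverride(t *testing.T) {
	dir := t.TempDir()

	// Point OLLAMA_MODELS at a throwaway directory for this test only.
	t.Setenv("OLLAMA_MODELS", dir)

	// Reload the cached config so envconfig.ModelsDir reflects the override.
	envconfig.LoadConfig()

	if envconfig.ModelsDir != dir {
		t.Fatalf("expected models dir %q, got %q", dir, envconfig.ModelsDir)
	}
}
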
+ 3 - 8
server/modelpath.go

@@ -8,6 +8,8 @@ import (
 	"path/filepath"
 	"regexp"
 	"strings"
+
+	"github.com/ollama/ollama/envconfig"
 )
 
 type ModelPath struct {
@@ -104,14 +106,7 @@ func (mp ModelPath) GetShortTagname() string {
 // modelsDir returns the value of the OLLAMA_MODELS environment variable or the user's home directory if OLLAMA_MODELS is not set.
 // The models directory is where Ollama stores its model files and manifests.
 func modelsDir() (string, error) {
-	if models, exists := os.LookupEnv("OLLAMA_MODELS"); exists {
-		return models, nil
-	}
-	home, err := os.UserHomeDir()
-	if err != nil {
-		return "", err
-	}
-	return filepath.Join(home, ".ollama", "models"), nil
+	return envconfig.ModelsDir, nil
 }
 
 // GetManifestPath returns the path to the manifest file for the given model path, it is up to the caller to create the directory if it does not exist.

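modelsDir() is kept as a thin wrapper so existing callers in the server package stay untouched; it now just forwards the value cached by envconfig. For illustration, a hedged sketch of how a path helper can build on that cached value (the manifestsRoot helper below is hypothetical and is not the actual GetManifestPath or GetBlobsPath implementation):

package main

import (
	"fmt"
	"path/filepath"

	"github.com/ollama/ollama/envconfig"
)

// manifestsRoot is a hypothetical helper showing how path helpers can
// derive locations from the cached models directory instead of
// re-reading the environment on every call.
func manifestsRoot() string {
	return filepath.Join(envconfig.ModelsDir, "manifests")
}

func main() {
	envconfig.LoadConfig()
	fmt.Println("manifests root:", manifestsRoot())
}
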
+ 3 - 0
server/modelpath_test.go

@@ -7,6 +7,8 @@ import (
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+
+	"github.com/ollama/ollama/envconfig"
 )
 
 func TestGetBlobsPath(t *testing.T) {
@@ -61,6 +63,7 @@ func TestGetBlobsPath(t *testing.T) {
 	for _, tc := range tests {
 		t.Run(tc.name, func(t *testing.T) {
 			t.Setenv("OLLAMA_MODELS", dir)
+			envconfig.LoadConfig()
 
 			got, err := GetBlobsPath(tc.digest)
 

+ 10 - 0
server/routes_create_test.go

@@ -15,6 +15,7 @@ import (
 
 	"github.com/gin-gonic/gin"
 	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/envconfig"
 	"github.com/ollama/ollama/llm"
 )
 
@@ -86,6 +87,7 @@ func checkFileExists(t *testing.T, p string, expect []string) {
 func TestCreateFromBin(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 
 	var s Server
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -111,6 +113,7 @@ func TestCreateFromBin(t *testing.T) {
 func TestCreateFromModel(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -151,6 +154,7 @@ func TestCreateFromModel(t *testing.T) {
 func TestCreateRemovesLayers(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -197,6 +201,7 @@ func TestCreateRemovesLayers(t *testing.T) {
 func TestCreateUnsetsSystem(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -252,6 +257,7 @@ func TestCreateUnsetsSystem(t *testing.T) {
 func TestCreateMergeParameters(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -354,6 +360,7 @@ func TestCreateMergeParameters(t *testing.T) {
 func TestCreateReplacesMessages(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -429,6 +436,7 @@ func TestCreateReplacesMessages(t *testing.T) {
 func TestCreateTemplateSystem(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -474,6 +482,7 @@ func TestCreateTemplateSystem(t *testing.T) {
 func TestCreateLicenses(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{
@@ -519,6 +528,7 @@ func TestCreateLicenses(t *testing.T) {
 func TestCreateDetectTemplate(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
 	var s Server
 
 	t.Run("matched", func(t *testing.T) {

+ 3 - 0
server/routes_delete_test.go

@@ -9,12 +9,15 @@ import (
 	"testing"
 
 	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/envconfig"
 	"github.com/ollama/ollama/types/model"
 )
 
 func TestDelete(t *testing.T) {
 	p := t.TempDir()
 	t.Setenv("OLLAMA_MODELS", p)
+	envconfig.LoadConfig()
+
 	var s Server
 
 	w := createRequest(t, s.CreateModelHandler, api.CreateRequest{

+ 2 - 0
server/routes_list_test.go

@@ -8,10 +8,12 @@ import (
 	"testing"
 
 	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/envconfig"
 )
 
 func TestList(t *testing.T) {
 	t.Setenv("OLLAMA_MODELS", t.TempDir())
+	envconfig.LoadConfig()
 
 	expectNames := []string{
 		"mistral:7b-instruct-q4_0",

+ 3 - 0
server/routes_test.go

@@ -18,6 +18,7 @@ import (
 	"github.com/stretchr/testify/require"
 
 	"github.com/ollama/ollama/api"
+	"github.com/ollama/ollama/envconfig"
 	"github.com/ollama/ollama/parser"
 	"github.com/ollama/ollama/types/model"
 	"github.com/ollama/ollama/version"
@@ -216,6 +217,7 @@ func Test_Routes(t *testing.T) {
 	}
 
 	t.Setenv("OLLAMA_MODELS", t.TempDir())
+	envconfig.LoadConfig()
 
 	s := &Server{}
 	router := s.GenerateRoutes()
@@ -246,6 +248,7 @@ func Test_Routes(t *testing.T) {
 
 func TestCase(t *testing.T) {
 	t.Setenv("OLLAMA_MODELS", t.TempDir())
+	envconfig.LoadConfig()
 
 	cases := []string{
 		"mistral",