
enable `cache_prompt` by default

Jeffrey Morgan 1 year ago
parent
commit
d4ebdadbe7
2 files changed with 2 additions and 3 deletions
  1. api/types.go (+0, -1)
  2. llm/ext_server.go (+2, -2)

+ 0 - 1
api/types.go

@@ -103,7 +103,6 @@ type Options struct {
 	MirostatEta      float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline  bool     `json:"penalize_newline,omitempty"`
 	Stop             []string `json:"stop,omitempty"`
-	Cache            bool     `json:"cache,omitempty"`
 }
 
 // Runner options which must be set when the model is loaded into memory
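
With the Cache field gone, a "cache" key is no longer part of the request options schema. A minimal sketch of the decoding effect, using an abridged, hypothetical copy of the struct (only the fields visible in the hunk above; ollama's real option-parsing path may differ):

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical subset of api.Options after this commit.
type Options struct {
	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
	Stop            []string `json:"stop,omitempty"`
}

func main() {
	var opts Options
	// A "cache" key now has no field to decode into; encoding/json
	// silently drops unknown keys.
	_ = json.Unmarshal([]byte(`{"cache":false,"stop":["User:"]}`), &opts)
	fmt.Printf("%+v\n", opts) // prints {PenalizeNewline:false Stop:[User:]}
}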

+ 2 - 2
llm/ext_server.go

@@ -234,8 +234,8 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"penalize_nl":       opts.PenalizeNewline,
 		"seed":              opts.Seed,
 		"stop":              opts.Stop,
-		"image_data":        imageData,		
-		"cache_prompt":      opts.Cache,
+		"image_data":        imageData,
+		"cache_prompt":      true,
 	}
 
 	if predict.Format == "json" {
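
The net effect: every completion request ollama sends to the llama.cpp server now asks for prompt caching, so the server can reuse KV-cache state from the previous request rather than re-evaluating a shared prompt prefix. A minimal sketch of the resulting request body (the prompt and stop values are illustrative, not from the diff):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sketch of the predict request after this commit: "cache_prompt"
	// is hardcoded to true instead of being read from opts.Cache.
	body, err := json.Marshal(map[string]any{
		"prompt":       "Why is the sky blue?", // illustrative prompt
		"stop":         []string{"User:"},
		"cache_prompt": true, // previously taken from opts.Cache
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}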