- package server
- import (
- "context"
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "io/fs"
- "log/slog"
- "math"
- "net"
- "net/http"
- "net/netip"
- "os"
- "os/signal"
- "path/filepath"
- "reflect"
- "runtime"
- "strconv"
- "strings"
- "sync"
- "syscall"
- "time"
- "github.com/gin-contrib/cors"
- "github.com/gin-gonic/gin"
- "golang.org/x/exp/slices"
- "github.com/ollama/ollama/api"
- "github.com/ollama/ollama/gpu"
- "github.com/ollama/ollama/llm"
- "github.com/ollama/ollama/openai"
- "github.com/ollama/ollama/parser"
- "github.com/ollama/ollama/version"
- )
- var mode string = gin.DebugMode
- type Server struct {
- addr net.Addr
- }
- func init() {
- switch mode {
- case gin.DebugMode:
- case gin.ReleaseMode:
- case gin.TestMode:
- default:
- mode = gin.DebugMode
- }
- gin.SetMode(mode)
- }
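- // loaded tracks the single llm runner currently resident in memory, along with its model, options, and expiry timer; all access must be guarded by mu.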
- var loaded struct {
- mu sync.Mutex
- runner llm.LLM
- expireAt time.Time
- expireTimer *time.Timer
- *Model
- *api.Options
- }
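- // defaultSessionDuration is how long a model stays loaded after its last request when no keep_alive is given.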
- var defaultSessionDuration = 5 * time.Minute
- // load loads a model into memory if it is not already loaded; the caller must hold loaded.mu before calling this function
- func load(c *gin.Context, model *Model, opts api.Options, sessionDuration time.Duration) error {
- needLoad := loaded.runner == nil || // is there a model loaded?
- loaded.ModelPath != model.ModelPath || // has the base model changed?
- !reflect.DeepEqual(loaded.AdapterPaths, model.AdapterPaths) || // have the adapters changed?
- !reflect.DeepEqual(loaded.Options.Runner, opts.Runner) // have the runner options changed?
- if needLoad {
- if loaded.runner != nil {
- slog.Info("changing loaded model")
- loaded.runner.Close()
- loaded.runner = nil
- loaded.Model = nil
- loaded.Options = nil
- }
- llmRunner, err := llm.New(model.ModelPath, model.AdapterPaths, model.ProjectorPaths, opts)
- if err != nil {
- // some older models are not compatible with newer versions of llama.cpp
- // show a generalized compatibility error until there is a better way to
- // check for model compatibility
- if errors.Is(err, llm.ErrUnsupportedFormat) || strings.Contains(err.Error(), "failed to load model") {
- err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, model.ShortName)
- }
- return err
- }
- loaded.Model = model
- loaded.runner = llmRunner
- loaded.Options = &opts
- }
- loaded.expireAt = time.Now().Add(sessionDuration)
- if loaded.expireTimer == nil {
- loaded.expireTimer = time.AfterFunc(sessionDuration, func() {
- loaded.mu.Lock()
- defer loaded.mu.Unlock()
- if time.Now().Before(loaded.expireAt) {
- return
- }
- if loaded.runner != nil {
- loaded.runner.Close()
- }
- loaded.runner = nil
- loaded.Model = nil
- loaded.Options = nil
- })
- }
- loaded.expireTimer.Reset(sessionDuration)
- return nil
- }
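- // modelOptions starts from api.DefaultOptions, then layers the model's own options and finally the per-request options on top.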
- func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
- opts := api.DefaultOptions()
- if err := opts.FromMap(model.Options); err != nil {
- return api.Options{}, err
- }
- if err := opts.FromMap(requestOpts); err != nil {
- return api.Options{}, err
- }
- return opts, nil
- }
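- // isSupportedImageType sniffs the payload's content type and accepts only JPEG and PNG images.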
- func isSupportedImageType(image []byte) bool {
- contentType := http.DetectContentType(image)
- allowedTypes := []string{"image/jpeg", "image/jpg", "image/png"}
- return slices.Contains(allowedTypes, contentType)
- }
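- // GenerateHandler handles POST /api/generate: it validates the request, loads the model, renders the prompt (optionally prepending the decoded context), and streams prediction results back to the client.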
- func GenerateHandler(c *gin.Context) {
- loaded.mu.Lock()
- defer loaded.mu.Unlock()
- checkpointStart := time.Now()
- var req api.GenerateRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- // validate the request
- switch {
- case req.Model == "":
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- case len(req.Format) > 0 && req.Format != "json":
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
- return
- case req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
- return
- }
- for _, img := range req.Images {
- if !isSupportedImageType(img) {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "unsupported image format"})
- return
- }
- }
- model, err := GetModel(req.Model)
- if err != nil {
- var pErr *fs.PathError
- if errors.As(err, &pErr) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- if model.IsEmbedding() {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "embedding models do not support generate"})
- return
- }
- opts, err := modelOptions(model, req.Options)
- if err != nil {
- if errors.Is(err, api.ErrInvalidOpts) {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- var sessionDuration time.Duration
- if req.KeepAlive == nil {
- sessionDuration = getDefaultSessionDuration()
- } else {
- sessionDuration = req.KeepAlive.Duration
- }
- if err := load(c, model, opts, sessionDuration); err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- // an empty request loads the model
- // note: for a short while template was used in lieu
- // of `raw` mode so we need to check for it too
- if req.Prompt == "" && req.Template == "" && req.System == "" {
- c.JSON(http.StatusOK, api.GenerateResponse{
- CreatedAt: time.Now().UTC(),
- Model: req.Model,
- Done: true,
- })
- return
- }
- checkpointLoaded := time.Now()
- var prompt string
- switch {
- case req.Raw:
- prompt = req.Prompt
- case req.Prompt != "":
- if req.Template == "" {
- req.Template = model.Template
- }
- if req.System == "" {
- req.System = model.System
- }
- slog.Debug("generate handler", "prompt", req.Prompt)
- slog.Debug("generate handler", "template", req.Template)
- slog.Debug("generate handler", "system", req.System)
- var sb strings.Builder
- for i := range req.Images {
- fmt.Fprintf(&sb, "[img-%d] ", i)
- }
- sb.WriteString(req.Prompt)
- p, err := Prompt(req.Template, req.System, sb.String(), "", true)
- if err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- sb.Reset()
- if req.Context != nil {
- prev, err := loaded.runner.Decode(c.Request.Context(), req.Context)
- if err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- sb.WriteString(prev)
- }
- sb.WriteString(p)
- prompt = sb.String()
- }
- slog.Debug("generate handler", "prompt", prompt)
- ch := make(chan any)
- var generated strings.Builder
- go func() {
- defer close(ch)
- fn := func(r llm.PredictResult) {
- // Update model expiration
- loaded.expireAt = time.Now().Add(sessionDuration)
- loaded.expireTimer.Reset(sessionDuration)
- // Build up the full response
- if _, err := generated.WriteString(r.Content); err != nil {
- ch <- gin.H{"error": err.Error()}
- return
- }
- resp := api.GenerateResponse{
- Model: req.Model,
- CreatedAt: time.Now().UTC(),
- Done: r.Done,
- Response: r.Content,
- Metrics: api.Metrics{
- PromptEvalCount: r.PromptEvalCount,
- PromptEvalDuration: r.PromptEvalDuration,
- EvalCount: r.EvalCount,
- EvalDuration: r.EvalDuration,
- },
- }
- if r.Done {
- resp.TotalDuration = time.Since(checkpointStart)
- resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)
- if !req.Raw {
- p, err := Prompt(req.Template, req.System, req.Prompt, generated.String(), false)
- if err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- // TODO (jmorganca): encode() should not strip special tokens
- tokens, err := loaded.runner.Encode(c.Request.Context(), p)
- if err != nil {
- ch <- gin.H{"error": err.Error()}
- return
- }
- resp.Context = append(req.Context, tokens...)
- }
- }
- ch <- resp
- }
- var images []llm.ImageData
- for i := range req.Images {
- images = append(images, llm.ImageData{
- ID: i,
- Data: req.Images[i],
- })
- }
- // Start prediction
- predictReq := llm.PredictOpts{
- Prompt: prompt,
- Format: req.Format,
- Images: images,
- Options: opts,
- }
- if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- // Accumulate responses into the final response
- var final api.GenerateResponse
- var sb strings.Builder
- for resp := range ch {
- switch r := resp.(type) {
- case api.GenerateResponse:
- sb.WriteString(r.Response)
- final = r
- case gin.H:
- if errorMsg, ok := r["error"].(string); ok {
- c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
- return
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
- return
- }
- default:
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
- return
- }
- }
- final.Response = sb.String()
- c.JSON(http.StatusOK, final)
- return
- }
- streamResponse(c, ch)
- }
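- // getDefaultSessionDuration reads OLLAMA_KEEP_ALIVE, accepting either an integer number of seconds (e.g. "300") or a Go duration string (e.g. "5m"); negative values keep the model loaded effectively forever.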
- func getDefaultSessionDuration() time.Duration {
- if t, exists := os.LookupEnv("OLLAMA_KEEP_ALIVE"); exists {
- v, err := strconv.Atoi(t)
- if err != nil {
- d, err := time.ParseDuration(t)
- if err != nil {
- return defaultSessionDuration
- }
- if d < 0 {
- return time.Duration(math.MaxInt64)
- }
- return d
- }
- d := time.Duration(v) * time.Second
- if d < 0 {
- return time.Duration(math.MaxInt64)
- }
- return d
- }
- return defaultSessionDuration
- }
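- // EmbeddingsHandler handles POST /api/embeddings: it loads the requested model and returns an embedding vector for the prompt.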
- func EmbeddingsHandler(c *gin.Context) {
- loaded.mu.Lock()
- defer loaded.mu.Unlock()
- var req api.EmbeddingRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Model == "" {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- model, err := GetModel(req.Model)
- if err != nil {
- var pErr *fs.PathError
- if errors.As(err, &pErr) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- opts, err := modelOptions(model, req.Options)
- if err != nil {
- if errors.Is(err, api.ErrInvalidOpts) {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- var sessionDuration time.Duration
- if req.KeepAlive == nil {
- sessionDuration = getDefaultSessionDuration()
- } else {
- sessionDuration = req.KeepAlive.Duration
- }
- if err := load(c, model, opts, sessionDuration); err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- // an empty request loads the model
- if req.Prompt == "" {
- c.JSON(http.StatusOK, api.EmbeddingResponse{Embedding: []float64{}})
- return
- }
- embedding, err := loaded.runner.Embedding(c.Request.Context(), req.Prompt)
- if err != nil {
- slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
- c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate embedding"})
- return
- }
- resp := api.EmbeddingResponse{
- Embedding: embedding,
- }
- c.JSON(http.StatusOK, resp)
- }
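- // PullModelHandler handles POST /api/pull: it downloads a model from the registry, streaming progress unless streaming is disabled.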
- func PullModelHandler(c *gin.Context) {
- var req api.PullRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(r api.ProgressResponse) {
- ch <- r
- }
- regOpts := &registryOptions{
- Insecure: req.Insecure,
- }
- ctx, cancel := context.WithCancel(c.Request.Context())
- defer cancel()
- if err := PullModel(ctx, model, regOpts, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- waitForStream(c, ch)
- return
- }
- streamResponse(c, ch)
- }
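- // PushModelHandler handles POST /api/push: it uploads a model to the registry, streaming progress unless streaming is disabled.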
- func PushModelHandler(c *gin.Context) {
- var req api.PushRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(r api.ProgressResponse) {
- ch <- r
- }
- regOpts := &registryOptions{
- Insecure: req.Insecure,
- }
- ctx, cancel := context.WithCancel(c.Request.Context())
- defer cancel()
- if err := PushModel(ctx, model, regOpts, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- waitForStream(c, ch)
- return
- }
- streamResponse(c, ch)
- }
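- // CreateModelHandler handles POST /api/create: it parses a Modelfile (inline or from a path) and builds the model, streaming progress as layers are created.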
- func CreateModelHandler(c *gin.Context) {
- var req api.CreateRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- if err := ParseModelPath(model).Validate(); err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Path == "" && req.Modelfile == "" {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or modelfile are required"})
- return
- }
- var modelfile io.Reader = strings.NewReader(req.Modelfile)
- if req.Path != "" && req.Modelfile == "" {
- mf, err := os.Open(req.Path)
- if err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
- return
- }
- defer mf.Close()
- modelfile = mf
- }
- commands, err := parser.Parse(modelfile)
- if err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(resp api.ProgressResponse) {
- ch <- resp
- }
- ctx, cancel := context.WithCancel(c.Request.Context())
- defer cancel()
- if err := CreateModel(ctx, model, filepath.Dir(req.Path), commands, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- waitForStream(c, ch)
- return
- }
- streamResponse(c, ch)
- }
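- // DeleteModelHandler handles DELETE /api/delete: it removes the named model and prunes any now-empty manifest directories.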
- func DeleteModelHandler(c *gin.Context) {
- var req api.DeleteRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- if err := DeleteModel(model); err != nil {
- if os.IsNotExist(err) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", model)})
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- }
- return
- }
- manifestsPath, err := GetManifestPath()
- if err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- if err := PruneDirectory(manifestsPath); err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- c.JSON(http.StatusOK, nil)
- }
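- // ShowModelHandler handles POST /api/show: it returns details, parameters, template, system prompt, and Modelfile for the named model.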
- func ShowModelHandler(c *gin.Context) {
- var req api.ShowRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Model != "" {
- // noop
- } else if req.Name != "" {
- req.Model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- resp, err := GetModelInfo(req)
- if err != nil {
- if os.IsNotExist(err) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- }
- return
- }
- c.JSON(http.StatusOK, resp)
- }
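- // GetModelInfo assembles a ShowResponse for the requested model, applying any system or template overrides from the request.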
- func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
- model, err := GetModel(req.Model)
- if err != nil {
- return nil, err
- }
- modelDetails := api.ModelDetails{
- ParentModel: model.ParentModel,
- Format: model.Config.ModelFormat,
- Family: model.Config.ModelFamily,
- Families: model.Config.ModelFamilies,
- ParameterSize: model.Config.ModelType,
- QuantizationLevel: model.Config.FileType,
- }
- if req.System != "" {
- model.System = req.System
- }
- if req.Template != "" {
- model.Template = req.Template
- }
- msgs := make([]api.Message, 0)
- for _, msg := range model.Messages {
- msgs = append(msgs, api.Message{Role: msg.Role, Content: msg.Content})
- }
- resp := &api.ShowResponse{
- License: strings.Join(model.License, "\n"),
- System: model.System,
- Template: model.Template,
- Details: modelDetails,
- Messages: msgs,
- }
- var params []string
- cs := 30
- for k, v := range model.Options {
- switch val := v.(type) {
- case []interface{}:
- for _, nv := range val {
- params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
- }
- default:
- params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
- }
- }
- resp.Parameters = strings.Join(params, "\n")
- for k, v := range req.Options {
- model.Options[k] = v
- }
- mf, err := ShowModelfile(model)
- if err != nil {
- return nil, err
- }
- resp.Modelfile = mf
- return resp, nil
- }
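- // ListModelsHandler handles GET /api/tags: it walks the manifests directory and returns a summary of every locally available model.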
- func ListModelsHandler(c *gin.Context) {
- models := make([]api.ModelResponse, 0)
- manifestsPath, err := GetManifestPath()
- if err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- modelResponse := func(modelName string) (api.ModelResponse, error) {
- model, err := GetModel(modelName)
- if err != nil {
- return api.ModelResponse{}, err
- }
- modelDetails := api.ModelDetails{
- Format: model.Config.ModelFormat,
- Family: model.Config.ModelFamily,
- Families: model.Config.ModelFamilies,
- ParameterSize: model.Config.ModelType,
- QuantizationLevel: model.Config.FileType,
- }
- return api.ModelResponse{
- Model: model.ShortName,
- Name: model.ShortName,
- Size: model.Size,
- Digest: model.Digest,
- Details: modelDetails,
- }, nil
- }
- walkFunc := func(path string, info os.FileInfo, _ error) error {
- if !info.IsDir() {
- path, tag := filepath.Split(path)
- model := strings.Trim(strings.TrimPrefix(path, manifestsPath), string(os.PathSeparator))
- modelPath := strings.Join([]string{model, tag}, ":")
- canonicalModelPath := strings.ReplaceAll(modelPath, string(os.PathSeparator), "/")
- resp, err := modelResponse(canonicalModelPath)
- if err != nil {
- slog.Info(fmt.Sprintf("skipping file: %s", canonicalModelPath))
- // nolint: nilerr
- return nil
- }
- resp.ModifiedAt = info.ModTime()
- models = append(models, resp)
- }
- return nil
- }
- if err := filepath.Walk(manifestsPath, walkFunc); err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- c.JSON(http.StatusOK, api.ListResponse{Models: models})
- }
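- // CopyModelHandler handles POST /api/copy: it copies an existing model to a new name after validating the destination path.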
- func CopyModelHandler(c *gin.Context) {
- var req api.CopyRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Source == "" || req.Destination == "" {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "source and destination are required"})
- return
- }
- if err := ParseModelPath(req.Destination).Validate(); err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if err := CopyModel(req.Source, req.Destination); err != nil {
- if os.IsNotExist(err) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Source)})
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- }
- return
- }
- }
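- // HeadBlobHandler handles HEAD /api/blobs/:digest: it reports whether a blob with the given digest exists locally.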
- func HeadBlobHandler(c *gin.Context) {
- path, err := GetBlobsPath(c.Param("digest"))
- if err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if _, err := os.Stat(path); err != nil {
- c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
- return
- }
- c.Status(http.StatusOK)
- }
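- // CreateBlobHandler handles POST /api/blobs/:digest: it stores the request body as a layer and verifies that its digest matches the URL.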
- func CreateBlobHandler(c *gin.Context) {
- layer, err := NewLayer(c.Request.Body, "")
- if err != nil {
- c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- if layer.Digest != c.Param("digest") {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
- return
- }
- if _, err := layer.Commit(); err != nil {
- c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- c.Status(http.StatusCreated)
- }
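- // defaultAllowOrigins are hosts that are always permitted as CORS origins, on any port.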
- var defaultAllowOrigins = []string{
- "localhost",
- "127.0.0.1",
- "0.0.0.0",
- }
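- // isLocalIP reports whether the given address is assigned to one of this machine's network interfaces.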
- func isLocalIP(ip netip.Addr) bool {
- if interfaces, err := net.Interfaces(); err == nil {
- for _, iface := range interfaces {
- addrs, err := iface.Addrs()
- if err != nil {
- continue
- }
- for _, a := range addrs {
- if parsed, _, err := net.ParseCIDR(a.String()); err == nil {
- if parsed.String() == ip.String() {
- return true
- }
- }
- }
- }
- }
- return false
- }
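- // allowedHost reports whether the request Host refers to this machine: empty, localhost, the machine's hostname, or a name under a local TLD (.localhost, .local, .internal).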
- func allowedHost(host string) bool {
- if host == "" || host == "localhost" {
- return true
- }
- if hostname, err := os.Hostname(); err == nil && host == hostname {
- return true
- }
- var tlds = []string{
- "localhost",
- "local",
- "internal",
- }
- // check if the host is a local TLD
- for _, tld := range tlds {
- if strings.HasSuffix(host, "."+tld) {
- return true
- }
- }
- return false
- }
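- // allowedHostsMiddleware only enforces Host checks when the server is bound to a loopback address; requests whose Host is not a local address or local hostname are rejected with 403.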
- func allowedHostsMiddleware(addr net.Addr) gin.HandlerFunc {
- return func(c *gin.Context) {
- if addr == nil {
- c.Next()
- return
- }
- if addr, err := netip.ParseAddrPort(addr.String()); err == nil && !addr.Addr().IsLoopback() {
- c.Next()
- return
- }
- host, _, err := net.SplitHostPort(c.Request.Host)
- if err != nil {
- host = c.Request.Host
- }
- if addr, err := netip.ParseAddr(host); err == nil {
- if addr.IsLoopback() || addr.IsPrivate() || addr.IsUnspecified() || isLocalIP(addr) {
- c.Next()
- return
- }
- }
- if allowedHost(host) {
- c.Next()
- return
- }
- c.AbortWithStatus(http.StatusForbidden)
- }
- }
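- // GenerateRoutes builds the gin router: CORS (OLLAMA_ORIGINS plus the defaults), host filtering, and all API and OpenAI-compatible endpoints.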
- func (s *Server) GenerateRoutes() http.Handler {
- var origins []string
- if o := os.Getenv("OLLAMA_ORIGINS"); o != "" {
- origins = strings.Split(o, ",")
- }
- config := cors.DefaultConfig()
- config.AllowWildcard = true
- config.AllowBrowserExtensions = true
- config.AllowOrigins = origins
- for _, allowOrigin := range defaultAllowOrigins {
- config.AllowOrigins = append(config.AllowOrigins,
- fmt.Sprintf("http://%s", allowOrigin),
- fmt.Sprintf("https://%s", allowOrigin),
- fmt.Sprintf("http://%s:*", allowOrigin),
- fmt.Sprintf("https://%s:*", allowOrigin),
- )
- }
- r := gin.Default()
- r.Use(
- cors.New(config),
- allowedHostsMiddleware(s.addr),
- )
- r.POST("/api/pull", PullModelHandler)
- r.POST("/api/generate", GenerateHandler)
- r.POST("/api/chat", ChatHandler)
- r.POST("/api/embeddings", EmbeddingsHandler)
- r.POST("/api/create", CreateModelHandler)
- r.POST("/api/push", PushModelHandler)
- r.POST("/api/copy", CopyModelHandler)
- r.DELETE("/api/delete", DeleteModelHandler)
- r.POST("/api/show", ShowModelHandler)
- r.POST("/api/blobs/:digest", CreateBlobHandler)
- r.HEAD("/api/blobs/:digest", HeadBlobHandler)
- // Compatibility endpoints
- r.POST("/v1/chat/completions", openai.Middleware(), ChatHandler)
- for _, method := range []string{http.MethodGet, http.MethodHead} {
- r.Handle(method, "/", func(c *gin.Context) {
- c.String(http.StatusOK, "Ollama is running")
- })
- r.Handle(method, "/api/tags", ListModelsHandler)
- r.Handle(method, "/api/version", func(c *gin.Context) {
- c.JSON(http.StatusOK, gin.H{"version": version.Version})
- })
- }
- return r
- }
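- // Serve configures logging, cleans up unused layers and manifests (unless OLLAMA_NOPRUNE is set), installs a signal handler to unload the model on shutdown, initializes the llm library, and serves HTTP on the listener.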
- func Serve(ln net.Listener) error {
- level := slog.LevelInfo
- if debug := os.Getenv("OLLAMA_DEBUG"); debug != "" {
- level = slog.LevelDebug
- }
- handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
- Level: level,
- AddSource: true,
- ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
- if attr.Key == slog.SourceKey {
- source := attr.Value.Any().(*slog.Source)
- source.File = filepath.Base(source.File)
- }
- return attr
- },
- })
- slog.SetDefault(slog.New(handler))
- blobsDir, err := GetBlobsPath("")
- if err != nil {
- return err
- }
- if err := fixBlobs(blobsDir); err != nil {
- return err
- }
- if noprune := os.Getenv("OLLAMA_NOPRUNE"); noprune == "" {
- // clean up unused layers and manifests
- if err := PruneLayers(); err != nil {
- return err
- }
- manifestsPath, err := GetManifestPath()
- if err != nil {
- return err
- }
- if err := PruneDirectory(manifestsPath); err != nil {
- return err
- }
- }
- s := &Server{addr: ln.Addr()}
- r := s.GenerateRoutes()
- slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
- srvr := &http.Server{
- Handler: r,
- }
- // listen for a ctrl+c and stop any loaded llm
- signals := make(chan os.Signal, 1)
- signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
- go func() {
- <-signals
- if loaded.runner != nil {
- loaded.runner.Close()
- }
- gpu.Cleanup()
- os.Exit(0)
- }()
- if err := llm.Init(); err != nil {
- return fmt.Errorf("unable to initialize llm library %w", err)
- }
- if runtime.GOOS == "linux" { // TODO - windows too
- // check compatibility to log warnings
- if _, err := gpu.CheckVRAM(); err != nil {
- slog.Info(err.Error())
- }
- }
- return srvr.Serve(ln)
- }
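- // waitForStream drains the channel and replies with a single JSON object: the final success progress response, or the first error encountered.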
- func waitForStream(c *gin.Context, ch chan any) {
- c.Header("Content-Type", "application/json")
- for resp := range ch {
- switch r := resp.(type) {
- case api.ProgressResponse:
- if r.Status == "success" {
- c.JSON(http.StatusOK, r)
- return
- }
- case gin.H:
- if errorMsg, ok := r["error"].(string); ok {
- c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
- return
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in progress response"})
- return
- }
- default:
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
- return
- }
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
- }
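- // streamResponse writes each value received on the channel as one line of newline-delimited JSON until the channel is closed.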
- func streamResponse(c *gin.Context, ch chan any) {
- c.Header("Content-Type", "application/x-ndjson")
- c.Stream(func(w io.Writer) bool {
- val, ok := <-ch
- if !ok {
- return false
- }
- bts, err := json.Marshal(val)
- if err != nil {
- slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
- return false
- }
- // Delineate chunks with new-line delimiter
- bts = append(bts, '\n')
- if _, err := w.Write(bts); err != nil {
- slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
- return false
- }
- return true
- })
- }
- // chatPrompt builds up a prompt from a series of messages for the currently `loaded` model
- func chatPrompt(ctx context.Context, template string, messages []api.Message, numCtx int) (string, error) {
- encode := func(s string) ([]int, error) {
- return loaded.runner.Encode(ctx, s)
- }
- prompt, err := ChatPrompt(template, messages, numCtx, encode)
- if err != nil {
- return "", err
- }
- return prompt, nil
- }
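- // ChatHandler handles POST /api/chat (and the OpenAI-compatible /v1/chat/completions): it validates the request, loads the model, renders the chat template over the messages, and streams assistant responses back to the client.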
- func ChatHandler(c *gin.Context) {
- loaded.mu.Lock()
- defer loaded.mu.Unlock()
- checkpointStart := time.Now()
- var req api.ChatRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- // validate the request
- switch {
- case req.Model == "":
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- case len(req.Format) > 0 && req.Format != "json":
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
- return
- }
- model, err := GetModel(req.Model)
- if err != nil {
- var pErr *fs.PathError
- if errors.As(err, &pErr) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- if model.IsEmbedding() {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "embedding models do not support chat"})
- return
- }
- opts, err := modelOptions(model, req.Options)
- if err != nil {
- if errors.Is(err, api.ErrInvalidOpts) {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- var sessionDuration time.Duration
- if req.KeepAlive == nil {
- sessionDuration = getDefaultSessionDuration()
- } else {
- sessionDuration = req.KeepAlive.Duration
- }
- if err := load(c, model, opts, sessionDuration); err != nil {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- return
- }
- checkpointLoaded := time.Now()
- // if the first message is not a system message, then add the model's default system message
- if len(req.Messages) > 0 && req.Messages[0].Role != "system" {
- req.Messages = append([]api.Message{
- {
- Role: "system",
- Content: model.System,
- },
- }, req.Messages...)
- }
- prompt, err := chatPrompt(c.Request.Context(), model.Template, req.Messages, opts.NumCtx)
- if err != nil {
- c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- // an empty request loads the model
- if len(req.Messages) == 0 || prompt == "" {
- resp := api.ChatResponse{
- CreatedAt: time.Now().UTC(),
- Model: req.Model,
- Done: true,
- Message: api.Message{Role: "assistant"},
- }
- c.JSON(http.StatusOK, resp)
- return
- }
- // only send images that are in the prompt
- var i int
- var images []llm.ImageData
- for _, m := range req.Messages {
- for _, img := range m.Images {
- if !isSupportedImageType(img) {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "unsupported image format"})
- return
- }
- if strings.Contains(prompt, fmt.Sprintf("[img-%d]", i)) {
- images = append(images, llm.ImageData{Data: img, ID: i})
- }
- i += 1
- }
- }
- slog.Debug("chat handler", "prompt", prompt, "images", len(images))
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(r llm.PredictResult) {
- // Update model expiration
- loaded.expireAt = time.Now().Add(sessionDuration)
- loaded.expireTimer.Reset(sessionDuration)
- resp := api.ChatResponse{
- Model: req.Model,
- CreatedAt: time.Now().UTC(),
- Message: api.Message{Role: "assistant", Content: r.Content},
- Done: r.Done,
- Metrics: api.Metrics{
- PromptEvalCount: r.PromptEvalCount,
- PromptEvalDuration: r.PromptEvalDuration,
- EvalCount: r.EvalCount,
- EvalDuration: r.EvalDuration,
- },
- }
- if r.Done {
- resp.TotalDuration = time.Since(checkpointStart)
- resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)
- }
- ch <- resp
- }
- // Start prediction
- predictReq := llm.PredictOpts{
- Prompt: prompt,
- Format: req.Format,
- Images: images,
- Options: opts,
- }
- if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- // Accumulate responses into the final response
- var final api.ChatResponse
- var sb strings.Builder
- for resp := range ch {
- switch r := resp.(type) {
- case api.ChatResponse:
- sb.WriteString(r.Message.Content)
- final = r
- case gin.H:
- if errorMsg, ok := r["error"].(string); ok {
- c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
- return
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
- return
- }
- default:
- c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
- return
- }
- }
- final.Message = api.Message{Role: "assistant", Content: sb.String()}
- c.JSON(http.StatusOK, final)
- return
- }
- streamResponse(c, ch)
- }