12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289 |
- package server
- import (
- "context"
- "encoding/json"
- "errors"
- "fmt"
- "io"
- "io/fs"
- "log/slog"
- "net"
- "net/http"
- "os"
- "os/signal"
- "path/filepath"
- "reflect"
- "runtime"
- "strings"
- "sync"
- "syscall"
- "time"
- "github.com/gin-contrib/cors"
- "github.com/gin-gonic/gin"
- "golang.org/x/exp/slices"
- "github.com/jmorganca/ollama/api"
- "github.com/jmorganca/ollama/gpu"
- "github.com/jmorganca/ollama/llm"
- "github.com/jmorganca/ollama/openai"
- "github.com/jmorganca/ollama/parser"
- "github.com/jmorganca/ollama/version"
- )
// mode selects the gin execution mode; it is validated in init(), which
// falls back to gin.DebugMode for any unrecognized value.
var mode string = gin.DebugMode

// Server wires the HTTP API to an on-disk working directory used for
// temporary runtime artifacts (created by NewServer, removed on shutdown).
type Server struct {
	WorkDir string
}
- func init() {
- switch mode {
- case gin.DebugMode:
- case gin.ReleaseMode:
- case gin.TestMode:
- default:
- mode = gin.DebugMode
- }
- gin.SetMode(mode)
- }
// loaded tracks the single resident model runner. All access must hold mu;
// handlers lock it for the duration of a request.
var loaded struct {
	mu sync.Mutex

	// runner is the active llm backend; nil when no model is loaded.
	runner llm.LLM

	// expireAt and expireTimer implement keep-alive eviction of the model
	// (see load()).
	expireAt    time.Time
	expireTimer *time.Timer

	*Model
	*api.Options
}

// defaultSessionDuration is how long a model stays loaded after a request
// when the client does not supply keep_alive.
var defaultSessionDuration = 5 * time.Minute
// load a model into memory if it is not already loaded, it is up to the caller to lock loaded.mu before calling this function
func load(c *gin.Context, model *Model, opts api.Options, sessionDuration time.Duration) error {
	workDir := c.GetString("workDir")

	// Reload only when the model identity, adapters, or runner options
	// differ from what is currently resident.
	needLoad := loaded.runner == nil || // is there a model loaded?
		loaded.ModelPath != model.ModelPath || // has the base model changed?
		!reflect.DeepEqual(loaded.AdapterPaths, model.AdapterPaths) || // have the adapters changed?
		!reflect.DeepEqual(loaded.Options.Runner, opts.Runner) // have the runner options changed?

	if needLoad {
		if loaded.runner != nil {
			slog.Info("changing loaded model")
			// Tear down the previous runner before starting a new one.
			loaded.runner.Close()
			loaded.runner = nil
			loaded.Model = nil
			loaded.Options = nil
		}

		llmRunner, err := llm.New(workDir, model.ModelPath, model.AdapterPaths, model.ProjectorPaths, opts)
		if err != nil {
			// some older models are not compatible with newer versions of llama.cpp
			// show a generalized compatibility error until there is a better way to
			// check for model compatibility
			if errors.Is(llm.ErrUnsupportedFormat, err) || strings.Contains(err.Error(), "failed to load model") {
				err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, model.ShortName)
			}

			return err
		}

		loaded.Model = model
		loaded.runner = llmRunner
		loaded.Options = &opts
	}

	// Arm (or re-arm) the keep-alive timer that evicts the model when idle.
	loaded.expireAt = time.Now().Add(sessionDuration)

	if loaded.expireTimer == nil {
		loaded.expireTimer = time.AfterFunc(sessionDuration, func() {
			loaded.mu.Lock()
			defer loaded.mu.Unlock()

			// A newer request may have pushed expireAt forward; skip eviction.
			if time.Now().Before(loaded.expireAt) {
				return
			}

			if loaded.runner != nil {
				loaded.runner.Close()
			}

			loaded.runner = nil
			loaded.Model = nil
			loaded.Options = nil
		})
	}

	loaded.expireTimer.Reset(sessionDuration)
	return nil
}
- func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
- opts := api.DefaultOptions()
- if err := opts.FromMap(model.Options); err != nil {
- return api.Options{}, err
- }
- if err := opts.FromMap(requestOpts); err != nil {
- return api.Options{}, err
- }
- return opts, nil
- }
// isSupportedImageType reports whether image's sniffed content type is an
// accepted format (JPEG or PNG).
func isSupportedImageType(image []byte) bool {
	switch http.DetectContentType(image) {
	case "image/jpeg", "image/jpg", "image/png":
		return true
	}
	return false
}
// GenerateHandler implements POST /api/generate: it loads the requested
// model (if necessary), renders the prompt, and streams prediction chunks
// back as NDJSON, or a single accumulated response when stream=false.
func GenerateHandler(c *gin.Context) {
	// The global runner is single-tenant: hold the lock for the whole request.
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	checkpointStart := time.Now()

	var req api.GenerateRequest
	err := c.ShouldBindJSON(&req)

	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	case req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
		return
	}

	// Reject unsupported image payloads up front (JPEG/PNG only).
	for _, img := range req.Images {
		if !isSupportedImageType(img) {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "unsupported image format"})
			return
		}
	}

	model, err := GetModel(req.Model)
	if err != nil {
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Merge model-level defaults with per-request option overrides.
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// an empty request loads the model
	// note: for a short while template was used in lieu
	// of `raw` mode so we need to check for it too
	if req.Prompt == "" && req.Template == "" && req.System == "" {
		c.JSON(http.StatusOK, api.GenerateResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
		})
		return
	}

	checkpointLoaded := time.Now()

	var prompt string
	switch {
	case req.Raw:
		// Raw mode: use the prompt verbatim, no templating.
		prompt = req.Prompt
	case req.Prompt != "":
		if req.Template == "" {
			req.Template = model.Template
		}

		if req.System == "" {
			req.System = model.System
		}

		slog.Debug("generate handler", "prompt", req.Prompt)
		slog.Debug("generate handler", "template", req.Template)
		slog.Debug("generate handler", "system", req.System)

		var sb strings.Builder
		if req.Context != nil {
			// Re-hydrate the previous conversation text from the context tokens.
			prev, err := loaded.runner.Decode(c.Request.Context(), req.Context)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}

			sb.WriteString(prev)
		}

		// write image tags
		// TODO: limit the number of images to fit in the context similar to the chat endpoint
		for i := range req.Images {
			req.Prompt += fmt.Sprintf(" [img-%d]", i)
		}

		p, err := Prompt(req.Template, req.System, req.Prompt, "", true)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		sb.WriteString(p)

		prompt = sb.String()
	}

	slog.Debug("generate handler", "prompt", prompt)

	ch := make(chan any)
	var generated strings.Builder
	go func() {
		defer close(ch)
		// fn is invoked by the runner once per predicted chunk.
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)

			// Build up the full response
			if _, err := generated.WriteString(r.Content); err != nil {
				ch <- gin.H{"error": err.Error()}
				return
			}

			resp := api.GenerateResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Done:      r.Done,
				Response:  r.Content,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			if r.Done {
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)

				if !req.Raw {
					// Re-encode prompt + full response so the client can pass
					// it back as `context` on the next request.
					p, err := Prompt(req.Template, req.System, req.Prompt, generated.String(), false)
					if err != nil {
						c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
						return
					}

					// TODO (jmorganca): encode() should not strip special tokens
					tokens, err := loaded.runner.Encode(c.Request.Context(), p)
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}

					resp.Context = append(req.Context, tokens...)
				}
			}

			ch <- resp
		}

		var images []llm.ImageData
		for i := range req.Images {
			images = append(images, llm.ImageData{
				ID:   i,
				Data: req.Images[i],
			})
		}

		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  images,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.GenerateResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.GenerateResponse:
				sb.WriteString(r.Response)
				final = r
			case gin.H:
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}

		final.Response = sb.String()
		c.JSON(http.StatusOK, final)
		return
	}

	streamResponse(c, ch)
}
// EmbeddingHandler implements POST /api/embeddings: it loads the model and
// returns the embedding vector for the request prompt. The loaded model must
// have the embedding option enabled.
func EmbeddingHandler(c *gin.Context) {
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	var req api.EmbeddingRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if req.Model == "" {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Merge model-level defaults with per-request option overrides.
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	if !loaded.Options.EmbeddingOnly {
		c.JSON(http.StatusBadRequest, gin.H{"error": "embedding option must be set to true"})
		return
	}

	embedding, err := loaded.runner.Embedding(c.Request.Context(), req.Prompt)
	if err != nil {
		// Log the detailed error; return a generic message to the client.
		slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate embedding"})
		return
	}

	resp := api.EmbeddingResponse{
		Embedding: embedding,
	}
	c.JSON(http.StatusOK, resp)
}
- func PullModelHandler(c *gin.Context) {
- var req api.PullRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(r api.ProgressResponse) {
- ch <- r
- }
- regOpts := ®istryOptions{
- Insecure: req.Insecure,
- }
- ctx, cancel := context.WithCancel(c.Request.Context())
- defer cancel()
- if err := PullModel(ctx, model, regOpts, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- waitForStream(c, ch)
- return
- }
- streamResponse(c, ch)
- }
- func PushModelHandler(c *gin.Context) {
- var req api.PushRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- var model string
- if req.Model != "" {
- model = req.Model
- } else if req.Name != "" {
- model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- ch := make(chan any)
- go func() {
- defer close(ch)
- fn := func(r api.ProgressResponse) {
- ch <- r
- }
- regOpts := ®istryOptions{
- Insecure: req.Insecure,
- }
- ctx, cancel := context.WithCancel(c.Request.Context())
- defer cancel()
- if err := PushModel(ctx, model, regOpts, fn); err != nil {
- ch <- gin.H{"error": err.Error()}
- }
- }()
- if req.Stream != nil && !*req.Stream {
- waitForStream(c, ch)
- return
- }
- streamResponse(c, ch)
- }
// CreateModelHandler implements POST /api/create: it parses a Modelfile
// (inline or from a filesystem path) and builds the model, streaming
// progress events.
func CreateModelHandler(c *gin.Context) {
	var req api.CreateRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Accept "model" (preferred) or the legacy "name" field.
	var model string
	if req.Model != "" {
		model = req.Model
	} else if req.Name != "" {
		model = req.Name
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	if err := ParseModelPath(model).Validate(); err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if req.Path == "" && req.Modelfile == "" {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or modelfile are required"})
		return
	}

	// Inline modelfile content takes precedence; fall back to reading the
	// file at req.Path only when no inline content was supplied.
	var modelfile io.Reader = strings.NewReader(req.Modelfile)
	if req.Path != "" && req.Modelfile == "" {
		mf, err := os.Open(req.Path)
		if err != nil {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
			return
		}
		defer mf.Close()

		modelfile = mf
	}

	commands, err := parser.Parse(modelfile)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(resp api.ProgressResponse) {
			ch <- resp
		}

		// Cancel the build if the client disconnects.
		ctx, cancel := context.WithCancel(c.Request.Context())
		defer cancel()

		// Relative paths inside the modelfile resolve against its directory.
		if err := CreateModel(ctx, model, filepath.Dir(req.Path), commands, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		waitForStream(c, ch)
		return
	}

	streamResponse(c, ch)
}
// DeleteModelHandler implements DELETE /api/delete: it removes a model and
// then prunes now-empty manifest directories.
func DeleteModelHandler(c *gin.Context) {
	var req api.DeleteRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Accept "model" (preferred) or the legacy "name" field.
	var model string
	if req.Model != "" {
		model = req.Model
	} else if req.Name != "" {
		model = req.Name
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	if err := DeleteModel(model); err != nil {
		if os.IsNotExist(err) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", model)})
		} else {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		}
		return
	}

	manifestsPath, err := GetManifestPath()
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	if err := PruneDirectory(manifestsPath); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, nil)
}
- func ShowModelHandler(c *gin.Context) {
- var req api.ShowRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Model != "" {
- // noop
- } else if req.Name != "" {
- req.Model = req.Name
- } else {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
- return
- }
- resp, err := GetModelInfo(req)
- if err != nil {
- if os.IsNotExist(err) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- }
- return
- }
- c.JSON(http.StatusOK, resp)
- }
- func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
- model, err := GetModel(req.Model)
- if err != nil {
- return nil, err
- }
- modelDetails := api.ModelDetails{
- ParentModel: model.ParentModel,
- Format: model.Config.ModelFormat,
- Family: model.Config.ModelFamily,
- Families: model.Config.ModelFamilies,
- ParameterSize: model.Config.ModelType,
- QuantizationLevel: model.Config.FileType,
- }
- if req.System != "" {
- model.System = req.System
- }
- if req.Template != "" {
- model.Template = req.Template
- }
- msgs := make([]api.Message, 0)
- for _, msg := range model.Messages {
- msgs = append(msgs, api.Message{Role: msg.Role, Content: msg.Content})
- }
- resp := &api.ShowResponse{
- License: strings.Join(model.License, "\n"),
- System: model.System,
- Template: model.Template,
- Details: modelDetails,
- Messages: msgs,
- }
- var params []string
- cs := 30
- for k, v := range model.Options {
- switch val := v.(type) {
- case []interface{}:
- for _, nv := range val {
- params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
- }
- default:
- params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
- }
- }
- resp.Parameters = strings.Join(params, "\n")
- for k, v := range req.Options {
- if _, ok := req.Options[k]; ok {
- model.Options[k] = v
- }
- }
- mf, err := ShowModelfile(model)
- if err != nil {
- return nil, err
- }
- resp.Modelfile = mf
- return resp, nil
- }
// ListModelsHandler implements GET /api/tags: it walks the manifests
// directory and returns one entry per model tag found on disk.
func ListModelsHandler(c *gin.Context) {
	models := make([]api.ModelResponse, 0)
	manifestsPath, err := GetManifestPath()
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// modelResponse resolves one canonical model path into an API entry.
	modelResponse := func(modelName string) (api.ModelResponse, error) {
		model, err := GetModel(modelName)
		if err != nil {
			return api.ModelResponse{}, err
		}

		modelDetails := api.ModelDetails{
			Format:            model.Config.ModelFormat,
			Family:            model.Config.ModelFamily,
			Families:          model.Config.ModelFamilies,
			ParameterSize:     model.Config.ModelType,
			QuantizationLevel: model.Config.FileType,
		}

		return api.ModelResponse{
			Model:   model.ShortName,
			Name:    model.ShortName,
			Size:    model.Size,
			Digest:  model.Digest,
			Details: modelDetails,
		}, nil
	}

	walkFunc := func(path string, info os.FileInfo, _ error) error {
		if !info.IsDir() {
			// Manifest layout on disk: <manifestsPath>/<model path...>/<tag>;
			// rebuild "model:tag" with forward slashes as the canonical name.
			path, tag := filepath.Split(path)
			model := strings.Trim(strings.TrimPrefix(path, manifestsPath), string(os.PathSeparator))
			modelPath := strings.Join([]string{model, tag}, ":")
			canonicalModelPath := strings.ReplaceAll(modelPath, string(os.PathSeparator), "/")

			resp, err := modelResponse(canonicalModelPath)
			if err != nil {
				// Unreadable manifests are skipped; they do not fail the listing.
				slog.Info(fmt.Sprintf("skipping file: %s", canonicalModelPath))
				// nolint: nilerr
				return nil
			}

			resp.ModifiedAt = info.ModTime()
			models = append(models, resp)
		}

		return nil
	}

	if err := filepath.Walk(manifestsPath, walkFunc); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, api.ListResponse{Models: models})
}
- func CopyModelHandler(c *gin.Context) {
- var req api.CopyRequest
- err := c.ShouldBindJSON(&req)
- switch {
- case errors.Is(err, io.EOF):
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
- return
- case err != nil:
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if req.Source == "" || req.Destination == "" {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "source add destination are required"})
- return
- }
- if err := ParseModelPath(req.Destination).Validate(); err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if err := CopyModel(req.Source, req.Destination); err != nil {
- if os.IsNotExist(err) {
- c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Source)})
- } else {
- c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
- }
- return
- }
- }
- func HeadBlobHandler(c *gin.Context) {
- path, err := GetBlobsPath(c.Param("digest"))
- if err != nil {
- c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
- return
- }
- if _, err := os.Stat(path); err != nil {
- c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
- return
- }
- c.Status(http.StatusOK)
- }
// CreateBlobHandler implements POST /api/blobs/:digest: it stores the request
// body as a layer and verifies its computed digest matches the URL parameter.
func CreateBlobHandler(c *gin.Context) {
	layer, err := NewLayer(c.Request.Body, "")
	if err != nil {
		c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Reject uploads whose content hash differs from the declared digest.
	if layer.Digest != c.Param("digest") {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
		return
	}

	if _, err := layer.Commit(); err != nil {
		c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.Status(http.StatusCreated)
}
// defaultAllowOrigins are hosts always permitted by CORS — expanded in
// GenerateRoutes to http/https on any port — in addition to any origins
// configured via OLLAMA_ORIGINS.
var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}
- func NewServer() (*Server, error) {
- workDir, err := os.MkdirTemp("", "ollama")
- if err != nil {
- return nil, err
- }
- return &Server{
- WorkDir: workDir,
- }, nil
- }
// GenerateRoutes builds the gin router: CORS configuration, a middleware
// that injects the server's working directory, and all API endpoints.
func (s *Server) GenerateRoutes() http.Handler {
	// Extra allowed origins come from OLLAMA_ORIGINS (comma-separated).
	var origins []string
	if o := os.Getenv("OLLAMA_ORIGINS"); o != "" {
		origins = strings.Split(o, ",")
	}

	config := cors.DefaultConfig()
	config.AllowWildcard = true
	config.AllowBrowserExtensions = true

	config.AllowOrigins = origins
	for _, allowOrigin := range defaultAllowOrigins {
		// Each default host is allowed over http and https, on any port.
		config.AllowOrigins = append(config.AllowOrigins,
			fmt.Sprintf("http://%s", allowOrigin),
			fmt.Sprintf("https://%s", allowOrigin),
			fmt.Sprintf("http://%s:*", allowOrigin),
			fmt.Sprintf("https://%s:*", allowOrigin),
		)
	}

	r := gin.Default()
	r.Use(
		cors.New(config),
		func(c *gin.Context) {
			// Expose the working directory to handlers (read by load()).
			c.Set("workDir", s.WorkDir)
			c.Next()
		},
	)

	r.POST("/api/pull", PullModelHandler)
	r.POST("/api/generate", GenerateHandler)
	r.POST("/api/chat", ChatHandler)
	r.POST("/api/embeddings", EmbeddingHandler)
	r.POST("/api/create", CreateModelHandler)
	r.POST("/api/push", PushModelHandler)
	r.POST("/api/copy", CopyModelHandler)
	r.DELETE("/api/delete", DeleteModelHandler)
	r.POST("/api/show", ShowModelHandler)
	r.POST("/api/blobs/:digest", CreateBlobHandler)
	r.HEAD("/api/blobs/:digest", HeadBlobHandler)

	// Compatibility endpoints
	r.POST("/v1/chat/completions", openai.Middleware(), ChatHandler)

	// Health, tag listing, and version are served on both GET and HEAD.
	for _, method := range []string{http.MethodGet, http.MethodHead} {
		r.Handle(method, "/", func(c *gin.Context) {
			c.String(http.StatusOK, "Ollama is running")
		})

		r.Handle(method, "/api/tags", ListModelsHandler)
		r.Handle(method, "/api/version", func(c *gin.Context) {
			c.JSON(http.StatusOK, gin.H{"version": version.Version})
		})
	}

	return r
}
- func Serve(ln net.Listener) error {
- level := slog.LevelInfo
- if debug := os.Getenv("OLLAMA_DEBUG"); debug != "" {
- level = slog.LevelDebug
- }
- handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
- Level: level,
- AddSource: true,
- ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
- if attr.Key == slog.SourceKey {
- source := attr.Value.Any().(*slog.Source)
- source.File = filepath.Base(source.File)
- }
- return attr
- },
- })
- slog.SetDefault(slog.New(handler))
- if noprune := os.Getenv("OLLAMA_NOPRUNE"); noprune == "" {
- // clean up unused layers and manifests
- if err := PruneLayers(); err != nil {
- return err
- }
- manifestsPath, err := GetManifestPath()
- if err != nil {
- return err
- }
- if err := PruneDirectory(manifestsPath); err != nil {
- return err
- }
- }
- s, err := NewServer()
- if err != nil {
- return err
- }
- r := s.GenerateRoutes()
- slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
- srvr := &http.Server{
- Handler: r,
- }
- // listen for a ctrl+c and stop any loaded llm
- signals := make(chan os.Signal, 1)
- signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
- go func() {
- <-signals
- if loaded.runner != nil {
- loaded.runner.Close()
- }
- os.RemoveAll(s.WorkDir)
- os.Exit(0)
- }()
- if err := llm.Init(s.WorkDir); err != nil {
- return fmt.Errorf("unable to initialize llm library %w", err)
- }
- if runtime.GOOS == "linux" { // TODO - windows too
- // check compatibility to log warnings
- if _, err := gpu.CheckVRAM(); err != nil {
- slog.Info(err.Error())
- }
- }
- return srvr.Serve(ln)
- }
// waitForStream drains ch until a terminal event arrives and replies with a
// single JSON body: the "success" ProgressResponse, or a 500 on any error.
// Used by the non-streaming (stream=false) variants of pull/push/create.
func waitForStream(c *gin.Context, ch chan interface{}) {
	c.Header("Content-Type", "application/json")
	for resp := range ch {
		switch r := resp.(type) {
		case api.ProgressResponse:
			// Intermediate progress is discarded; only "success" is returned.
			if r.Status == "success" {
				c.JSON(http.StatusOK, r)
				return
			}
		case gin.H:
			if errorMsg, ok := r["error"].(string); ok {
				c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
				return
			} else {
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in progress response"})
				return
			}
		default:
			c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
			return
		}
	}
	// Channel closed without a success or error event.
	c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
}
- func streamResponse(c *gin.Context, ch chan any) {
- c.Header("Content-Type", "application/x-ndjson")
- c.Stream(func(w io.Writer) bool {
- val, ok := <-ch
- if !ok {
- return false
- }
- bts, err := json.Marshal(val)
- if err != nil {
- slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
- return false
- }
- // Delineate chunks with new-line delimiter
- bts = append(bts, '\n')
- if _, err := w.Write(bts); err != nil {
- slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
- return false
- }
- return true
- })
- }
- // ChatPrompt builds up a prompt from a series of messages for the currently `loaded` model
- func chatPrompt(ctx context.Context, template string, messages []api.Message, numCtx int) (string, error) {
- encode := func(s string) ([]int, error) {
- return loaded.runner.Encode(ctx, s)
- }
- prompt, err := ChatPrompt(template, messages, numCtx, encode)
- if err != nil {
- return "", err
- }
- return prompt, nil
- }
// ChatHandler implements POST /api/chat (and backs /v1/chat/completions):
// it loads the model, templates the message history into a prompt, and
// streams assistant chunks as NDJSON, or a single accumulated message when
// stream=false.
func ChatHandler(c *gin.Context) {
	// The global runner is single-tenant: hold the lock for the whole request.
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	checkpointStart := time.Now()

	var req api.ChatRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Merge model-level defaults with per-request option overrides.
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	checkpointLoaded := time.Now()

	// if the first message is not a system message, then add the model's default system message
	if len(req.Messages) > 0 && req.Messages[0].Role != "system" {
		req.Messages = append([]api.Message{
			{
				Role:    "system",
				Content: model.System,
			},
		}, req.Messages...)
	}

	prompt, err := chatPrompt(c.Request.Context(), model.Template, req.Messages, opts.NumCtx)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// an empty request loads the model
	if len(req.Messages) == 0 || prompt == "" {
		resp := api.ChatResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
			Message:   api.Message{Role: "assistant"},
		}
		c.JSON(http.StatusOK, resp)
		return
	}

	// only send images that are in the prompt
	// i counts images across ALL messages so it lines up with the [img-N]
	// tags the prompt template produced.
	var i int
	var images []llm.ImageData
	for _, m := range req.Messages {
		for _, img := range m.Images {
			if !isSupportedImageType(img) {
				c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "unsupported image format"})
				return
			}

			if strings.Contains(prompt, fmt.Sprintf("[img-%d]", i)) {
				images = append(images, llm.ImageData{Data: img, ID: i})
			}
			i += 1
		}
	}

	slog.Debug("chat handler", "prompt", prompt, "images", len(images))

	ch := make(chan any)

	go func() {
		defer close(ch)

		// fn is invoked by the runner once per predicted chunk.
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)

			resp := api.ChatResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Message:   api.Message{Role: "assistant", Content: r.Content},
				Done:      r.Done,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			if r.Done {
				// Final chunk carries the timing summary.
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)
			}

			ch <- resp
		}

		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  images,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.ChatResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.ChatResponse:
				sb.WriteString(r.Message.Content)
				final = r
			case gin.H:
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}

		final.Message = api.Message{Role: "assistant", Content: sb.String()}
		c.JSON(http.StatusOK, final)
		return
	}

	streamResponse(c, ch)
}
|