routes.go 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335
  1. package server
  2. import (
  3. "context"
  4. "encoding/json"
  5. "errors"
  6. "fmt"
  7. "io"
  8. "io/fs"
  9. "log/slog"
  10. "net"
  11. "net/http"
  12. "os"
  13. "os/signal"
  14. "path/filepath"
  15. "reflect"
  16. "runtime"
  17. "strings"
  18. "sync"
  19. "syscall"
  20. "time"
  21. "github.com/gin-contrib/cors"
  22. "github.com/gin-gonic/gin"
  23. "github.com/jmorganca/ollama/api"
  24. "github.com/jmorganca/ollama/gpu"
  25. "github.com/jmorganca/ollama/llm"
  26. "github.com/jmorganca/ollama/parser"
  27. "github.com/jmorganca/ollama/version"
  28. )
// mode selects the gin runtime mode; a build can override it via -ldflags
// (-X). It is validated and applied in init below.
var mode string = gin.DebugMode

// Server holds per-process state for the HTTP API server.
type Server struct {
	// WorkDir is a scratch directory handed to handlers (via the "workDir"
	// context key) and to llm.Init; it is removed on shutdown in Serve.
	WorkDir string
}
  33. func init() {
  34. switch mode {
  35. case gin.DebugMode:
  36. case gin.ReleaseMode:
  37. case gin.TestMode:
  38. default:
  39. mode = gin.DebugMode
  40. }
  41. gin.SetMode(mode)
  42. }
// loaded tracks the single model currently resident in memory, along with
// its runner and idle-expiry bookkeeping. All fields are guarded by mu.
var loaded struct {
	mu sync.Mutex

	// runner is the live llm instance; nil when no model is loaded.
	runner llm.LLM

	// expireAt is the instant after which the expiry timer may unload the
	// model; pushed forward on every load and prediction callback.
	expireAt    time.Time
	expireTimer *time.Timer

	// embedded: metadata and effective options of the loaded model; both
	// nil when nothing is loaded.
	*Model
	*api.Options
}

// defaultSessionDuration is how long a model stays loaded after its last
// use when the request does not specify a keep-alive duration.
var defaultSessionDuration = 5 * time.Minute
  52. // load a model into memory if it is not already loaded, it is up to the caller to lock loaded.mu before calling this function
  53. func load(c *gin.Context, model *Model, opts api.Options, sessionDuration time.Duration) error {
  54. workDir := c.GetString("workDir")
  55. needLoad := loaded.runner == nil || // is there a model loaded?
  56. loaded.ModelPath != model.ModelPath || // has the base model changed?
  57. !reflect.DeepEqual(loaded.AdapterPaths, model.AdapterPaths) || // have the adapters changed?
  58. !reflect.DeepEqual(loaded.Options.Runner, opts.Runner) // have the runner options changed?
  59. if needLoad {
  60. if loaded.runner != nil {
  61. slog.Info("changing loaded model")
  62. loaded.runner.Close()
  63. loaded.runner = nil
  64. loaded.Model = nil
  65. loaded.Options = nil
  66. }
  67. llmRunner, err := llm.New(workDir, model.ModelPath, model.AdapterPaths, model.ProjectorPaths, opts)
  68. if err != nil {
  69. // some older models are not compatible with newer versions of llama.cpp
  70. // show a generalized compatibility error until there is a better way to
  71. // check for model compatibility
  72. if errors.Is(llm.ErrUnsupportedFormat, err) || strings.Contains(err.Error(), "failed to load model") {
  73. err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, model.ShortName)
  74. }
  75. return err
  76. }
  77. loaded.Model = model
  78. loaded.runner = llmRunner
  79. loaded.Options = &opts
  80. }
  81. loaded.expireAt = time.Now().Add(sessionDuration)
  82. if loaded.expireTimer == nil {
  83. loaded.expireTimer = time.AfterFunc(sessionDuration, func() {
  84. loaded.mu.Lock()
  85. defer loaded.mu.Unlock()
  86. if time.Now().Before(loaded.expireAt) {
  87. return
  88. }
  89. if loaded.runner != nil {
  90. loaded.runner.Close()
  91. }
  92. loaded.runner = nil
  93. loaded.Model = nil
  94. loaded.Options = nil
  95. })
  96. }
  97. loaded.expireTimer.Reset(sessionDuration)
  98. return nil
  99. }
  100. func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
  101. opts := api.DefaultOptions()
  102. if err := opts.FromMap(model.Options); err != nil {
  103. return api.Options{}, err
  104. }
  105. if err := opts.FromMap(requestOpts); err != nil {
  106. return api.Options{}, err
  107. }
  108. return opts, nil
  109. }
// GenerateHandler services POST /api/generate: it binds and validates the
// request, loads (or reuses) the model, renders the prompt, and streams
// prediction results back as NDJSON — or, when stream=false, accumulates
// them into a single JSON response.
//
// NOTE(review): loaded.mu is held for the entire request, so generations
// are fully serialized across clients.
func GenerateHandler(c *gin.Context) {
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	// wall-clock anchor for the TotalDuration / LoadDuration metrics
	checkpointStart := time.Now()

	var req api.GenerateRequest
	err := c.ShouldBindJSON(&req)

	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	case req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		// a path error means the manifest doesn't exist locally
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	opts, err := modelOptions(model, req.Options)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// an empty request loads the model
	if req.Prompt == "" && req.Template == "" && req.System == "" {
		c.JSON(http.StatusOK, api.GenerateResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
		})
		return
	}

	checkpointLoaded := time.Now()

	var prompt string
	var promptVars PromptVars
	switch {
	case req.Raw:
		// raw mode: send the prompt to the model verbatim, no templating
		prompt = req.Prompt
	case req.Prompt != "":
		if req.Template != "" {
			// override the default model template
			model.Template = req.Template
		}

		var rebuild strings.Builder
		if req.Context != nil {
			// TODO: context is deprecated, at some point the context logic within this conditional should be removed
			// decode the prior conversation tokens back into text and
			// prepend them to the templated prompt
			prevCtx, err := loaded.runner.Decode(c.Request.Context(), req.Context)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}

			// Remove leading spaces from prevCtx if present
			prevCtx = strings.TrimPrefix(prevCtx, " ")
			rebuild.WriteString(prevCtx)
		}

		promptVars = PromptVars{
			System: req.System,
			Prompt: req.Prompt,
			First:  len(req.Context) == 0,
		}
		if promptVars.System == "" {
			// fall back to the model's stored system prompt
			promptVars.System = model.System
		}

		// reference each attached image in the prompt by its index
		for i := range req.Images {
			promptVars.Prompt += fmt.Sprintf(" [img-%d]", i)
		}

		p, err := model.PreResponsePrompt(promptVars)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		rebuild.WriteString(p)
		prompt = rebuild.String()
	}

	slog.Debug("generate handler", "prompt", prompt)

	ch := make(chan any)
	var generated strings.Builder
	go func() {
		defer close(ch)
		// fn is invoked by the runner once per predicted chunk
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)

			// Build up the full response
			if _, err := generated.WriteString(r.Content); err != nil {
				ch <- gin.H{"error": err.Error()}
				return
			}

			resp := api.GenerateResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Done:      r.Done,
				Response:  r.Content,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			if r.Done {
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)

				if !req.Raw {
					// append the generated text to the history and template it if needed
					promptVars.Response = generated.String()
					result, err := model.PostResponseTemplate(promptVars)
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}

					// re-encode the whole exchange so the client can send it
					// back as context on the next request
					embd, err := loaded.runner.Encode(c.Request.Context(), prompt+result)
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}
					resp.Context = embd
				}
			}

			ch <- resp
		}

		var images []llm.ImageData
		for i := range req.Images {
			images = append(images, llm.ImageData{
				ID:   i,
				Data: req.Images[i],
			})
		}

		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  images,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.GenerateResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.GenerateResponse:
				sb.WriteString(r.Response)
				final = r
			case gin.H:
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}

		final.Response = sb.String()
		c.JSON(http.StatusOK, final)
		return
	}

	streamResponse(c, ch)
}
  305. func EmbeddingHandler(c *gin.Context) {
  306. loaded.mu.Lock()
  307. defer loaded.mu.Unlock()
  308. var req api.EmbeddingRequest
  309. err := c.ShouldBindJSON(&req)
  310. switch {
  311. case errors.Is(err, io.EOF):
  312. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  313. return
  314. case err != nil:
  315. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  316. return
  317. }
  318. if req.Model == "" {
  319. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  320. return
  321. }
  322. model, err := GetModel(req.Model)
  323. if err != nil {
  324. var pErr *fs.PathError
  325. if errors.As(err, &pErr) {
  326. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
  327. return
  328. }
  329. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  330. return
  331. }
  332. opts, err := modelOptions(model, req.Options)
  333. if err != nil {
  334. c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  335. return
  336. }
  337. var sessionDuration time.Duration
  338. if req.KeepAlive == nil {
  339. sessionDuration = defaultSessionDuration
  340. } else {
  341. sessionDuration = req.KeepAlive.Duration
  342. }
  343. if err := load(c, model, opts, sessionDuration); err != nil {
  344. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  345. return
  346. }
  347. if !loaded.Options.EmbeddingOnly {
  348. c.JSON(http.StatusBadRequest, gin.H{"error": "embedding option must be set to true"})
  349. return
  350. }
  351. embedding, err := loaded.runner.Embedding(c.Request.Context(), req.Prompt)
  352. if err != nil {
  353. slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
  354. c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate embedding"})
  355. return
  356. }
  357. resp := api.EmbeddingResponse{
  358. Embedding: embedding,
  359. }
  360. c.JSON(http.StatusOK, resp)
  361. }
  362. func PullModelHandler(c *gin.Context) {
  363. var req api.PullRequest
  364. err := c.ShouldBindJSON(&req)
  365. switch {
  366. case errors.Is(err, io.EOF):
  367. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  368. return
  369. case err != nil:
  370. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  371. return
  372. }
  373. var model string
  374. if req.Model != "" {
  375. model = req.Model
  376. } else if req.Name != "" {
  377. model = req.Name
  378. } else {
  379. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  380. return
  381. }
  382. ch := make(chan any)
  383. go func() {
  384. defer close(ch)
  385. fn := func(r api.ProgressResponse) {
  386. ch <- r
  387. }
  388. regOpts := &RegistryOptions{
  389. Insecure: req.Insecure,
  390. }
  391. ctx, cancel := context.WithCancel(c.Request.Context())
  392. defer cancel()
  393. if err := PullModel(ctx, model, regOpts, fn); err != nil {
  394. ch <- gin.H{"error": err.Error()}
  395. }
  396. }()
  397. if req.Stream != nil && !*req.Stream {
  398. waitForStream(c, ch)
  399. return
  400. }
  401. streamResponse(c, ch)
  402. }
  403. func PushModelHandler(c *gin.Context) {
  404. var req api.PushRequest
  405. err := c.ShouldBindJSON(&req)
  406. switch {
  407. case errors.Is(err, io.EOF):
  408. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  409. return
  410. case err != nil:
  411. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  412. return
  413. }
  414. var model string
  415. if req.Model != "" {
  416. model = req.Model
  417. } else if req.Name != "" {
  418. model = req.Name
  419. } else {
  420. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  421. return
  422. }
  423. ch := make(chan any)
  424. go func() {
  425. defer close(ch)
  426. fn := func(r api.ProgressResponse) {
  427. ch <- r
  428. }
  429. regOpts := &RegistryOptions{
  430. Insecure: req.Insecure,
  431. }
  432. ctx, cancel := context.WithCancel(c.Request.Context())
  433. defer cancel()
  434. if err := PushModel(ctx, model, regOpts, fn); err != nil {
  435. ch <- gin.H{"error": err.Error()}
  436. }
  437. }()
  438. if req.Stream != nil && !*req.Stream {
  439. waitForStream(c, ch)
  440. return
  441. }
  442. streamResponse(c, ch)
  443. }
  444. func CreateModelHandler(c *gin.Context) {
  445. var req api.CreateRequest
  446. err := c.ShouldBindJSON(&req)
  447. switch {
  448. case errors.Is(err, io.EOF):
  449. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  450. return
  451. case err != nil:
  452. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  453. return
  454. }
  455. var model string
  456. if req.Model != "" {
  457. model = req.Model
  458. } else if req.Name != "" {
  459. model = req.Name
  460. } else {
  461. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  462. return
  463. }
  464. if err := ParseModelPath(model).Validate(); err != nil {
  465. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  466. return
  467. }
  468. if req.Path == "" && req.Modelfile == "" {
  469. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or modelfile are required"})
  470. return
  471. }
  472. var modelfile io.Reader = strings.NewReader(req.Modelfile)
  473. if req.Path != "" && req.Modelfile == "" {
  474. mf, err := os.Open(req.Path)
  475. if err != nil {
  476. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
  477. return
  478. }
  479. defer mf.Close()
  480. modelfile = mf
  481. }
  482. commands, err := parser.Parse(modelfile)
  483. if err != nil {
  484. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  485. return
  486. }
  487. ch := make(chan any)
  488. go func() {
  489. defer close(ch)
  490. fn := func(resp api.ProgressResponse) {
  491. ch <- resp
  492. }
  493. ctx, cancel := context.WithCancel(c.Request.Context())
  494. defer cancel()
  495. if err := CreateModel(ctx, model, filepath.Dir(req.Path), commands, fn); err != nil {
  496. ch <- gin.H{"error": err.Error()}
  497. }
  498. }()
  499. if req.Stream != nil && !*req.Stream {
  500. waitForStream(c, ch)
  501. return
  502. }
  503. streamResponse(c, ch)
  504. }
  505. func DeleteModelHandler(c *gin.Context) {
  506. var req api.DeleteRequest
  507. err := c.ShouldBindJSON(&req)
  508. switch {
  509. case errors.Is(err, io.EOF):
  510. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  511. return
  512. case err != nil:
  513. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  514. return
  515. }
  516. var model string
  517. if req.Model != "" {
  518. model = req.Model
  519. } else if req.Name != "" {
  520. model = req.Name
  521. } else {
  522. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  523. return
  524. }
  525. if err := DeleteModel(model); err != nil {
  526. if os.IsNotExist(err) {
  527. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", model)})
  528. } else {
  529. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  530. }
  531. return
  532. }
  533. manifestsPath, err := GetManifestPath()
  534. if err != nil {
  535. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  536. return
  537. }
  538. if err := PruneDirectory(manifestsPath); err != nil {
  539. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  540. return
  541. }
  542. c.JSON(http.StatusOK, nil)
  543. }
  544. func ShowModelHandler(c *gin.Context) {
  545. var req api.ShowRequest
  546. err := c.ShouldBindJSON(&req)
  547. switch {
  548. case errors.Is(err, io.EOF):
  549. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  550. return
  551. case err != nil:
  552. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  553. return
  554. }
  555. if req.Model != "" {
  556. // noop
  557. } else if req.Name != "" {
  558. req.Model = req.Name
  559. } else {
  560. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  561. return
  562. }
  563. resp, err := GetModelInfo(req)
  564. if err != nil {
  565. if os.IsNotExist(err) {
  566. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
  567. } else {
  568. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  569. }
  570. return
  571. }
  572. c.JSON(http.StatusOK, resp)
  573. }
  574. func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
  575. model, err := GetModel(req.Model)
  576. if err != nil {
  577. return nil, err
  578. }
  579. modelDetails := api.ModelDetails{
  580. ParentModel: model.ParentModel,
  581. Format: model.Config.ModelFormat,
  582. Family: model.Config.ModelFamily,
  583. Families: model.Config.ModelFamilies,
  584. ParameterSize: model.Config.ModelType,
  585. QuantizationLevel: model.Config.FileType,
  586. }
  587. if req.System != "" {
  588. model.System = req.System
  589. }
  590. if req.Template != "" {
  591. model.Template = req.Template
  592. }
  593. msgs := make([]api.Message, 0)
  594. for _, msg := range model.Messages {
  595. msgs = append(msgs, api.Message{Role: msg.Role, Content: msg.Content})
  596. }
  597. resp := &api.ShowResponse{
  598. License: strings.Join(model.License, "\n"),
  599. System: model.System,
  600. Template: model.Template,
  601. Details: modelDetails,
  602. Messages: msgs,
  603. }
  604. var params []string
  605. cs := 30
  606. for k, v := range model.Options {
  607. switch val := v.(type) {
  608. case []interface{}:
  609. for _, nv := range val {
  610. params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
  611. }
  612. default:
  613. params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
  614. }
  615. }
  616. resp.Parameters = strings.Join(params, "\n")
  617. for k, v := range req.Options {
  618. if _, ok := req.Options[k]; ok {
  619. model.Options[k] = v
  620. }
  621. }
  622. mf, err := ShowModelfile(model)
  623. if err != nil {
  624. return nil, err
  625. }
  626. resp.Modelfile = mf
  627. return resp, nil
  628. }
  629. func ListModelsHandler(c *gin.Context) {
  630. models := make([]api.ModelResponse, 0)
  631. manifestsPath, err := GetManifestPath()
  632. if err != nil {
  633. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  634. return
  635. }
  636. modelResponse := func(modelName string) (api.ModelResponse, error) {
  637. model, err := GetModel(modelName)
  638. if err != nil {
  639. return api.ModelResponse{}, err
  640. }
  641. modelDetails := api.ModelDetails{
  642. Format: model.Config.ModelFormat,
  643. Family: model.Config.ModelFamily,
  644. Families: model.Config.ModelFamilies,
  645. ParameterSize: model.Config.ModelType,
  646. QuantizationLevel: model.Config.FileType,
  647. }
  648. return api.ModelResponse{
  649. Model: model.ShortName,
  650. Name: model.ShortName,
  651. Size: model.Size,
  652. Digest: model.Digest,
  653. Details: modelDetails,
  654. }, nil
  655. }
  656. walkFunc := func(path string, info os.FileInfo, _ error) error {
  657. if !info.IsDir() {
  658. path, tag := filepath.Split(path)
  659. model := strings.Trim(strings.TrimPrefix(path, manifestsPath), string(os.PathSeparator))
  660. modelPath := strings.Join([]string{model, tag}, ":")
  661. canonicalModelPath := strings.ReplaceAll(modelPath, string(os.PathSeparator), "/")
  662. resp, err := modelResponse(canonicalModelPath)
  663. if err != nil {
  664. slog.Info(fmt.Sprintf("skipping file: %s", canonicalModelPath))
  665. // nolint: nilerr
  666. return nil
  667. }
  668. resp.ModifiedAt = info.ModTime()
  669. models = append(models, resp)
  670. }
  671. return nil
  672. }
  673. if err := filepath.Walk(manifestsPath, walkFunc); err != nil {
  674. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  675. return
  676. }
  677. c.JSON(http.StatusOK, api.ListResponse{Models: models})
  678. }
  679. func CopyModelHandler(c *gin.Context) {
  680. var req api.CopyRequest
  681. err := c.ShouldBindJSON(&req)
  682. switch {
  683. case errors.Is(err, io.EOF):
  684. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  685. return
  686. case err != nil:
  687. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  688. return
  689. }
  690. if req.Source == "" || req.Destination == "" {
  691. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "source add destination are required"})
  692. return
  693. }
  694. if err := ParseModelPath(req.Destination).Validate(); err != nil {
  695. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  696. return
  697. }
  698. if err := CopyModel(req.Source, req.Destination); err != nil {
  699. if os.IsNotExist(err) {
  700. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Source)})
  701. } else {
  702. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  703. }
  704. return
  705. }
  706. }
  707. func HeadBlobHandler(c *gin.Context) {
  708. path, err := GetBlobsPath(c.Param("digest"))
  709. if err != nil {
  710. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  711. return
  712. }
  713. if _, err := os.Stat(path); err != nil {
  714. c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
  715. return
  716. }
  717. c.Status(http.StatusOK)
  718. }
  719. func CreateBlobHandler(c *gin.Context) {
  720. layer, err := NewLayer(c.Request.Body, "")
  721. if err != nil {
  722. c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  723. return
  724. }
  725. if layer.Digest != c.Param("digest") {
  726. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
  727. return
  728. }
  729. if _, err := layer.Commit(); err != nil {
  730. c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  731. return
  732. }
  733. c.Status(http.StatusCreated)
  734. }
// defaultAllowOrigins are hosts always permitted by CORS; GenerateRoutes
// expands each into http/https variants, with and without a port wildcard.
var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}
  740. func NewServer() (*Server, error) {
  741. workDir, err := os.MkdirTemp("", "ollama")
  742. if err != nil {
  743. return nil, err
  744. }
  745. return &Server{
  746. WorkDir: workDir,
  747. }, nil
  748. }
  749. func (s *Server) GenerateRoutes() http.Handler {
  750. var origins []string
  751. if o := os.Getenv("OLLAMA_ORIGINS"); o != "" {
  752. origins = strings.Split(o, ",")
  753. }
  754. config := cors.DefaultConfig()
  755. config.AllowWildcard = true
  756. config.AllowBrowserExtensions = true
  757. config.AllowOrigins = origins
  758. for _, allowOrigin := range defaultAllowOrigins {
  759. config.AllowOrigins = append(config.AllowOrigins,
  760. fmt.Sprintf("http://%s", allowOrigin),
  761. fmt.Sprintf("https://%s", allowOrigin),
  762. fmt.Sprintf("http://%s:*", allowOrigin),
  763. fmt.Sprintf("https://%s:*", allowOrigin),
  764. )
  765. }
  766. r := gin.Default()
  767. r.Use(
  768. cors.New(config),
  769. func(c *gin.Context) {
  770. c.Set("workDir", s.WorkDir)
  771. c.Next()
  772. },
  773. )
  774. r.POST("/api/pull", PullModelHandler)
  775. r.POST("/api/generate", GenerateHandler)
  776. r.POST("/api/chat", ChatHandler)
  777. r.POST("/api/embeddings", EmbeddingHandler)
  778. r.POST("/api/create", CreateModelHandler)
  779. r.POST("/api/push", PushModelHandler)
  780. r.POST("/api/copy", CopyModelHandler)
  781. r.DELETE("/api/delete", DeleteModelHandler)
  782. r.POST("/api/show", ShowModelHandler)
  783. r.POST("/api/blobs/:digest", CreateBlobHandler)
  784. r.HEAD("/api/blobs/:digest", HeadBlobHandler)
  785. for _, method := range []string{http.MethodGet, http.MethodHead} {
  786. r.Handle(method, "/", func(c *gin.Context) {
  787. c.String(http.StatusOK, "Ollama is running")
  788. })
  789. r.Handle(method, "/api/tags", ListModelsHandler)
  790. r.Handle(method, "/api/version", func(c *gin.Context) {
  791. c.JSON(http.StatusOK, gin.H{"version": version.Version})
  792. })
  793. }
  794. return r
  795. }
  796. func Serve(ln net.Listener) error {
  797. level := slog.LevelInfo
  798. if debug := os.Getenv("OLLAMA_DEBUG"); debug != "" {
  799. level = slog.LevelDebug
  800. }
  801. handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
  802. Level: level,
  803. AddSource: true,
  804. ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
  805. if attr.Key == slog.SourceKey {
  806. source := attr.Value.Any().(*slog.Source)
  807. source.File = filepath.Base(source.File)
  808. }
  809. return attr
  810. },
  811. })
  812. slog.SetDefault(slog.New(handler))
  813. if noprune := os.Getenv("OLLAMA_NOPRUNE"); noprune == "" {
  814. // clean up unused layers and manifests
  815. if err := PruneLayers(); err != nil {
  816. return err
  817. }
  818. manifestsPath, err := GetManifestPath()
  819. if err != nil {
  820. return err
  821. }
  822. if err := PruneDirectory(manifestsPath); err != nil {
  823. return err
  824. }
  825. }
  826. s, err := NewServer()
  827. if err != nil {
  828. return err
  829. }
  830. r := s.GenerateRoutes()
  831. slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
  832. srvr := &http.Server{
  833. Handler: r,
  834. }
  835. // listen for a ctrl+c and stop any loaded llm
  836. signals := make(chan os.Signal, 1)
  837. signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
  838. go func() {
  839. <-signals
  840. if loaded.runner != nil {
  841. loaded.runner.Close()
  842. }
  843. os.RemoveAll(s.WorkDir)
  844. os.Exit(0)
  845. }()
  846. if err := llm.Init(s.WorkDir); err != nil {
  847. return fmt.Errorf("unable to initialize llm library %w", err)
  848. }
  849. if runtime.GOOS == "linux" { // TODO - windows too
  850. // check compatibility to log warnings
  851. if _, err := gpu.CheckVRAM(); err != nil {
  852. slog.Info(err.Error())
  853. }
  854. }
  855. return srvr.Serve(ln)
  856. }
  857. func waitForStream(c *gin.Context, ch chan interface{}) {
  858. c.Header("Content-Type", "application/json")
  859. for resp := range ch {
  860. switch r := resp.(type) {
  861. case api.ProgressResponse:
  862. if r.Status == "success" {
  863. c.JSON(http.StatusOK, r)
  864. return
  865. }
  866. case gin.H:
  867. if errorMsg, ok := r["error"].(string); ok {
  868. c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
  869. return
  870. } else {
  871. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in progress response"})
  872. return
  873. }
  874. default:
  875. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
  876. return
  877. }
  878. }
  879. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
  880. }
  881. func streamResponse(c *gin.Context, ch chan any) {
  882. c.Header("Content-Type", "application/x-ndjson")
  883. c.Stream(func(w io.Writer) bool {
  884. val, ok := <-ch
  885. if !ok {
  886. return false
  887. }
  888. bts, err := json.Marshal(val)
  889. if err != nil {
  890. slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
  891. return false
  892. }
  893. // Delineate chunks with new-line delimiter
  894. bts = append(bts, '\n')
  895. if _, err := w.Write(bts); err != nil {
  896. slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
  897. return false
  898. }
  899. return true
  900. })
  901. }
// ChatHandler implements POST /api/chat. It binds and validates the request,
// loads (or reuses) the requested model, templates the chat history into a
// prompt that fits the context window, and runs prediction — streaming
// NDJSON responses by default, or a single accumulated response when
// req.Stream is explicitly false.
//
// NOTE(review): loaded.mu is held for the entire request, so chat requests
// are fully serialized against each other and against anything else that
// takes this lock.
func ChatHandler(c *gin.Context) {
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	// start time for TotalDuration reported in the final response
	checkpointStart := time.Now()

	var req api.ChatRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		// empty body binds as io.EOF; report it with a clearer message
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		// "json" is the only supported structured output format
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		// a path error indicates the model manifest is absent on disk,
		// i.e. the model has not been pulled yet
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// merge model defaults with per-request option overrides
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// how long to keep the model resident after this request
	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// an empty request loads the model
	if len(req.Messages) == 0 {
		resp := api.ChatResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
			Message:   api.Message{Role: "assistant"},
		}
		c.JSON(http.StatusOK, resp)
		return
	}

	// model is resident from here; LoadDuration = checkpointLoaded - checkpointStart
	checkpointLoaded := time.Now()

	chat, err := model.ChatPrompts(req.Messages)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// trim the templated history to fit the model's context window
	prompt, images, err := trimmedPrompt(c.Request.Context(), chat, model)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	slog.Debug("chat handler", "prompt", prompt)

	// producer goroutine sends api.ChatResponse chunks (or a gin.H error)
	// on ch and closes it when prediction finishes
	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)

			resp := api.ChatResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Message:   api.Message{Role: "assistant", Content: r.Content},
				Done:      r.Done,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			// attach timing totals only to the final chunk
			if r.Done {
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)
			}

			ch <- resp
		}

		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  images,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.ChatResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.ChatResponse:
				// concatenate content; keep the last chunk for its metrics
				sb.WriteString(r.Message.Content)
				final = r
			case gin.H:
				// NOTE(review): returning here stops draining the unbuffered
				// channel; a producer still sending could block — confirm
				// the producer always exits after sending an error value
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}

		final.Message = api.Message{Role: "assistant", Content: sb.String()}
		c.JSON(http.StatusOK, final)
		return
	}

	streamResponse(c, ch)
}
// promptInfo stores the variables used to template a prompt, and the token length of the resulting template for some model
type promptInfo struct {
	vars     PromptVars // template variables for one exchange in the chat history
	tokenLen int        // token count of the rendered template, as encoded by the loaded runner
}
// trimmedPrompt builds a prompt to send to a running model. It ensures the prompt fits within the max context length,
// while preserving the most recent system message.
//
// It walks chat.Prompts newest-to-oldest, rendering each through the model
// template and counting tokens with the loaded runner, and stops adding
// older prompts once loaded.NumCtx would be exceeded. Each image is budgeted
// at a flat 768 tokens; images that do not fit have their " [img-N]"
// placeholder stripped from the prompt text instead. Returns the final
// prompt string (oldest-first) and the images that made the cut.
func trimmedPrompt(ctx context.Context, chat *ChatHistory, model *Model) (string, []llm.ImageData, error) {
	if len(chat.Prompts) == 0 {
		return "", nil, nil
	}

	// promptsToAdd is built most-recent-first; the string is reassembled in
	// chronological order at the end by prepending each rendered prompt
	var promptsToAdd []promptInfo
	var totalTokenLength int
	var systemPromptIncluded bool
	var images []llm.ImageData

	// reverse iterate through the prompts to build the prompt string in a way that fits the max context length
	for i := len(chat.Prompts) - 1; i >= 0; i-- {
		prompt := chat.Prompts[i]
		promptText, err := promptString(model, prompt, i == len(chat.Prompts)-1)
		if err != nil {
			return "", nil, err
		}

		encodedTokens, err := loaded.runner.Encode(ctx, promptText)
		if err != nil {
			return "", nil, err
		}

		// the most recent prompt (i == last) is always included, even if it
		// alone exceeds the budget
		if totalTokenLength+len(encodedTokens) > loaded.NumCtx && i != len(chat.Prompts)-1 {
			break // reached max context length, stop adding more prompts
		}

		for j := range prompt.Images {
			// each image is charged a fixed 768-token cost against the budget
			if totalTokenLength+768 > loaded.NumCtx {
				// this decreases the token length but overestimating is fine
				prompt.Prompt = strings.ReplaceAll(prompt.Prompt, fmt.Sprintf(" [img-%d]", prompt.Images[j].ID), "")
				continue
			}
			totalTokenLength += 768
			images = append(images, prompt.Images[j])
		}

		totalTokenLength += len(encodedTokens)
		systemPromptIncluded = systemPromptIncluded || prompt.System != ""
		promptsToAdd = append(promptsToAdd, promptInfo{vars: prompt, tokenLen: len(encodedTokens)})
	}

	// ensure the system prompt is included, if not already
	if chat.LastSystem != "" && !systemPromptIncluded {
		var err error
		promptsToAdd, err = includeSystemPrompt(ctx, chat.LastSystem, totalTokenLength, promptsToAdd)
		if err != nil {
			return "", nil, err
		}
	}

	// the last entry of promptsToAdd is the chronologically-first prompt
	promptsToAdd[len(promptsToAdd)-1].vars.First = true

	// construct the final prompt string from the prompts which fit within the context window
	var result string
	for i, prompt := range promptsToAdd {
		// i == 0 is the most recent prompt, rendered with the pre-response template
		promptText, err := promptString(model, prompt.vars, i == 0)
		if err != nil {
			return "", nil, err
		}
		result = promptText + result
	}

	return result, images, nil
}
  1099. // promptString applies the model template to the prompt
  1100. func promptString(model *Model, vars PromptVars, isMostRecent bool) (string, error) {
  1101. if isMostRecent {
  1102. p, err := model.PreResponsePrompt(vars)
  1103. if err != nil {
  1104. return "", fmt.Errorf("pre-response template: %w", err)
  1105. }
  1106. return p, nil
  1107. }
  1108. p, err := Prompt(model.Template, vars)
  1109. if err != nil {
  1110. return "", err
  1111. }
  1112. return p, nil
  1113. }
  1114. // includeSystemPrompt adjusts the prompts to include the system prompt.
  1115. func includeSystemPrompt(ctx context.Context, systemPrompt string, totalTokenLength int, promptsToAdd []promptInfo) ([]promptInfo, error) {
  1116. systemTokens, err := loaded.runner.Encode(ctx, systemPrompt)
  1117. if err != nil {
  1118. return nil, err
  1119. }
  1120. for i := len(promptsToAdd) - 1; i >= 0; i-- {
  1121. if totalTokenLength+len(systemTokens) <= loaded.NumCtx {
  1122. promptsToAdd[i].vars.System = systemPrompt
  1123. return promptsToAdd[:i+1], nil
  1124. }
  1125. totalTokenLength -= promptsToAdd[i].tokenLen
  1126. }
  1127. // if got here, system did not fit anywhere, so return the most recent prompt with the system message set
  1128. recent := promptsToAdd[len(promptsToAdd)-1]
  1129. recent.vars.System = systemPrompt
  1130. return []promptInfo{recent}, nil
  1131. }