// routes.go
  1. package server
  2. import (
  3. "context"
  4. "encoding/json"
  5. "errors"
  6. "fmt"
  7. "io"
  8. "io/fs"
  9. "log/slog"
  10. "net"
  11. "net/http"
  12. "os"
  13. "os/signal"
  14. "path/filepath"
  15. "reflect"
  16. "runtime"
  17. "strings"
  18. "sync"
  19. "syscall"
  20. "time"
  21. "github.com/gin-contrib/cors"
  22. "github.com/gin-gonic/gin"
  23. "github.com/jmorganca/ollama/api"
  24. "github.com/jmorganca/ollama/gpu"
  25. "github.com/jmorganca/ollama/llm"
  26. "github.com/jmorganca/ollama/parser"
  27. "github.com/jmorganca/ollama/version"
  28. )
// mode selects the gin runtime mode. It may be overridden at build time
// (e.g. via -ldflags -X); init validates it and falls back to debug mode.
var mode string = gin.DebugMode
// Server holds per-process state for the HTTP API.
type Server struct {
	// WorkDir is a scratch directory (created by NewServer) that handlers
	// can read from the request context under the "workDir" key.
	WorkDir string
}
  33. func init() {
  34. switch mode {
  35. case gin.DebugMode:
  36. case gin.ReleaseMode:
  37. case gin.TestMode:
  38. default:
  39. mode = gin.DebugMode
  40. }
  41. gin.SetMode(mode)
  42. }
// loaded tracks the single model currently resident in memory, its runner,
// and the expiry bookkeeping used to unload it after a period of inactivity.
var loaded struct {
	mu sync.Mutex // guards every field below

	runner llm.LLM // active inference runner; nil when no model is loaded

	expireAt    time.Time   // instant after which the model may be unloaded
	expireTimer *time.Timer // created lazily by load; unloads the model when it fires

	*Model       // metadata of the loaded model; nil when unloaded
	*api.Options // options the model was loaded with; nil when unloaded
}
// defaultSessionDuration is how long a model stays loaded after a request
// when the client does not specify keep_alive.
var defaultSessionDuration = 5 * time.Minute
  52. // load a model into memory if it is not already loaded, it is up to the caller to lock loaded.mu before calling this function
  53. func load(c *gin.Context, model *Model, opts api.Options, sessionDuration time.Duration) error {
  54. workDir := c.GetString("workDir")
  55. needLoad := loaded.runner == nil || // is there a model loaded?
  56. loaded.ModelPath != model.ModelPath || // has the base model changed?
  57. !reflect.DeepEqual(loaded.AdapterPaths, model.AdapterPaths) || // have the adapters changed?
  58. !reflect.DeepEqual(loaded.Options.Runner, opts.Runner) // have the runner options changed?
  59. if needLoad {
  60. if loaded.runner != nil {
  61. slog.Info("changing loaded model")
  62. loaded.runner.Close()
  63. loaded.runner = nil
  64. loaded.Model = nil
  65. loaded.Options = nil
  66. }
  67. llmRunner, err := llm.New(workDir, model.ModelPath, model.AdapterPaths, model.ProjectorPaths, opts)
  68. if err != nil {
  69. // some older models are not compatible with newer versions of llama.cpp
  70. // show a generalized compatibility error until there is a better way to
  71. // check for model compatibility
  72. if errors.Is(llm.ErrUnsupportedFormat, err) || strings.Contains(err.Error(), "failed to load model") {
  73. err = fmt.Errorf("%v: this model may be incompatible with your version of Ollama. If you previously pulled this model, try updating it by running `ollama pull %s`", err, model.ShortName)
  74. }
  75. return err
  76. }
  77. loaded.Model = model
  78. loaded.runner = llmRunner
  79. loaded.Options = &opts
  80. }
  81. loaded.expireAt = time.Now().Add(sessionDuration)
  82. if loaded.expireTimer == nil {
  83. loaded.expireTimer = time.AfterFunc(sessionDuration, func() {
  84. loaded.mu.Lock()
  85. defer loaded.mu.Unlock()
  86. if time.Now().Before(loaded.expireAt) {
  87. return
  88. }
  89. if loaded.runner != nil {
  90. loaded.runner.Close()
  91. }
  92. loaded.runner = nil
  93. loaded.Model = nil
  94. loaded.Options = nil
  95. })
  96. }
  97. loaded.expireTimer.Reset(sessionDuration)
  98. return nil
  99. }
  100. func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
  101. opts := api.DefaultOptions()
  102. if err := opts.FromMap(model.Options); err != nil {
  103. return api.Options{}, err
  104. }
  105. if err := opts.FromMap(requestOpts); err != nil {
  106. return api.Options{}, err
  107. }
  108. return opts, nil
  109. }
// GenerateHandler runs a completion request against the requested model and
// streams the generated tokens back as JSON objects (or, when stream=false,
// accumulates them into a single response). An empty prompt simply loads the
// model and returns.
func GenerateHandler(c *gin.Context) {
	// the loaded-model state is shared; hold the lock for the whole request
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	// start of total-duration measurement
	checkpointStart := time.Now()

	var req api.GenerateRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	case req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		// a path error means the manifest is missing, i.e. model not pulled
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// merge default, model, and request options
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// how long to keep the model resident after this request
	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// an empty request loads the model
	if req.Prompt == "" && req.Template == "" && req.System == "" {
		c.JSON(http.StatusOK, api.GenerateResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
		})
		return
	}

	// marks the end of model loading for the LoadDuration metric
	checkpointLoaded := time.Now()

	// build the final prompt: raw prompts pass through verbatim, otherwise
	// the model template is applied (optionally prefixed by the decoded
	// deprecated context)
	var prompt string
	var promptVars PromptVars
	switch {
	case req.Raw:
		prompt = req.Prompt
	case req.Prompt != "":
		if req.Template != "" {
			// override the default model template
			model.Template = req.Template
		}

		var rebuild strings.Builder
		if req.Context != nil {
			// TODO: context is deprecated, at some point the context logic within this conditional should be removed
			prevCtx, err := loaded.runner.Decode(c.Request.Context(), req.Context)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}

			// Remove leading spaces from prevCtx if present
			prevCtx = strings.TrimPrefix(prevCtx, " ")
			rebuild.WriteString(prevCtx)
		}
		promptVars = PromptVars{
			System: req.System,
			Prompt: req.Prompt,
			First:  len(req.Context) == 0,
		}

		if promptVars.System == "" {
			promptVars.System = model.System
		}

		// reference each attached image with an [img-N] placeholder
		for i := range req.Images {
			promptVars.Prompt += fmt.Sprintf(" [img-%d]", i)
		}

		p, err := model.PreResponsePrompt(promptVars)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		rebuild.WriteString(p)
		prompt = rebuild.String()
	}

	slog.Debug(fmt.Sprintf("prompt: %s", prompt))

	// the prediction goroutine forwards partial results (or errors) over ch
	ch := make(chan any)
	var generated strings.Builder
	go func() {
		defer close(ch)
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)

			// Build up the full response
			if _, err := generated.WriteString(r.Content); err != nil {
				ch <- gin.H{"error": err.Error()}
				return
			}

			resp := api.GenerateResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Done:      r.Done,
				Response:  r.Content,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			if r.Done {
				// final chunk: attach timing metrics and, for templated
				// requests, the encoded context for follow-up requests
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)

				if !req.Raw {
					// append the generated text to the history and template it if needed
					promptVars.Response = generated.String()
					result, err := model.PostResponseTemplate(promptVars)
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}
					embd, err := loaded.runner.Encode(c.Request.Context(), prompt+result)
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}
					resp.Context = embd
				}
			}

			ch <- resp
		}

		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  req.Images,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.GenerateResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.GenerateResponse:
				sb.WriteString(r.Response)
				final = r
			case gin.H:
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}

		final.Response = sb.String()
		c.JSON(http.StatusOK, final)
		return
	}

	streamResponse(c, ch)
}
// EmbeddingHandler loads the requested model (which must have the embedding
// option enabled) and returns an embedding vector for the prompt.
func EmbeddingHandler(c *gin.Context) {
	// the loaded-model state is shared; hold the lock for the whole request
	loaded.mu.Lock()
	defer loaded.mu.Unlock()

	var req api.EmbeddingRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if req.Model == "" {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	model, err := GetModel(req.Model)
	if err != nil {
		// a path error means the manifest is missing, i.e. model not pulled
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// merge default, model, and request options
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// how long to keep the model resident after this request
	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}

	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// the model must have been loaded with embedding support enabled
	if !loaded.Options.EmbeddingOnly {
		c.JSON(http.StatusBadRequest, gin.H{"error": "embedding option must be set to true"})
		return
	}

	embedding, err := loaded.runner.Embedding(c.Request.Context(), req.Prompt)
	if err != nil {
		slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
		c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate embedding"})
		return
	}

	resp := api.EmbeddingResponse{
		Embedding: embedding,
	}
	c.JSON(http.StatusOK, resp)
}
  363. func PullModelHandler(c *gin.Context) {
  364. var req api.PullRequest
  365. err := c.ShouldBindJSON(&req)
  366. switch {
  367. case errors.Is(err, io.EOF):
  368. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  369. return
  370. case err != nil:
  371. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  372. return
  373. }
  374. var model string
  375. if req.Model != "" {
  376. model = req.Model
  377. } else if req.Name != "" {
  378. model = req.Name
  379. } else {
  380. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  381. return
  382. }
  383. ch := make(chan any)
  384. go func() {
  385. defer close(ch)
  386. fn := func(r api.ProgressResponse) {
  387. ch <- r
  388. }
  389. regOpts := &RegistryOptions{
  390. Insecure: req.Insecure,
  391. }
  392. ctx, cancel := context.WithCancel(c.Request.Context())
  393. defer cancel()
  394. if err := PullModel(ctx, model, regOpts, fn); err != nil {
  395. ch <- gin.H{"error": err.Error()}
  396. }
  397. }()
  398. if req.Stream != nil && !*req.Stream {
  399. waitForStream(c, ch)
  400. return
  401. }
  402. streamResponse(c, ch)
  403. }
  404. func PushModelHandler(c *gin.Context) {
  405. var req api.PushRequest
  406. err := c.ShouldBindJSON(&req)
  407. switch {
  408. case errors.Is(err, io.EOF):
  409. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  410. return
  411. case err != nil:
  412. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  413. return
  414. }
  415. var model string
  416. if req.Model != "" {
  417. model = req.Model
  418. } else if req.Name != "" {
  419. model = req.Name
  420. } else {
  421. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  422. return
  423. }
  424. ch := make(chan any)
  425. go func() {
  426. defer close(ch)
  427. fn := func(r api.ProgressResponse) {
  428. ch <- r
  429. }
  430. regOpts := &RegistryOptions{
  431. Insecure: req.Insecure,
  432. }
  433. ctx, cancel := context.WithCancel(c.Request.Context())
  434. defer cancel()
  435. if err := PushModel(ctx, model, regOpts, fn); err != nil {
  436. ch <- gin.H{"error": err.Error()}
  437. }
  438. }()
  439. if req.Stream != nil && !*req.Stream {
  440. waitForStream(c, ch)
  441. return
  442. }
  443. streamResponse(c, ch)
  444. }
// CreateModelHandler builds a new model from a Modelfile supplied either
// inline in the request or via a path on the server's filesystem, streaming
// progress updates back to the client.
func CreateModelHandler(c *gin.Context) {
	var req api.CreateRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// accept "model" or the legacy "name" field
	var model string
	if req.Model != "" {
		model = req.Model
	} else if req.Name != "" {
		model = req.Name
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	if err := ParseModelPath(model).Validate(); err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if req.Path == "" && req.Modelfile == "" {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or modelfile are required"})
		return
	}

	// inline modelfile content takes precedence over a path
	var modelfile io.Reader = strings.NewReader(req.Modelfile)
	if req.Path != "" && req.Modelfile == "" {
		mf, err := os.Open(req.Path)
		if err != nil {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
			return
		}
		defer mf.Close()

		modelfile = mf
	}

	commands, err := parser.Parse(modelfile)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// run creation in the background, forwarding progress over ch
	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(resp api.ProgressResponse) {
			ch <- resp
		}

		ctx, cancel := context.WithCancel(c.Request.Context())
		defer cancel()

		if err := CreateModel(ctx, model, filepath.Dir(req.Path), commands, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	// stream unless the client explicitly asked for a single response
	if req.Stream != nil && !*req.Stream {
		waitForStream(c, ch)
		return
	}

	streamResponse(c, ch)
}
  506. func DeleteModelHandler(c *gin.Context) {
  507. var req api.DeleteRequest
  508. err := c.ShouldBindJSON(&req)
  509. switch {
  510. case errors.Is(err, io.EOF):
  511. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  512. return
  513. case err != nil:
  514. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  515. return
  516. }
  517. var model string
  518. if req.Model != "" {
  519. model = req.Model
  520. } else if req.Name != "" {
  521. model = req.Name
  522. } else {
  523. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  524. return
  525. }
  526. if err := DeleteModel(model); err != nil {
  527. if os.IsNotExist(err) {
  528. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", model)})
  529. } else {
  530. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  531. }
  532. return
  533. }
  534. manifestsPath, err := GetManifestPath()
  535. if err != nil {
  536. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  537. return
  538. }
  539. if err := PruneDirectory(manifestsPath); err != nil {
  540. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  541. return
  542. }
  543. c.JSON(http.StatusOK, nil)
  544. }
  545. func ShowModelHandler(c *gin.Context) {
  546. var req api.ShowRequest
  547. err := c.ShouldBindJSON(&req)
  548. switch {
  549. case errors.Is(err, io.EOF):
  550. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  551. return
  552. case err != nil:
  553. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  554. return
  555. }
  556. if req.Model != "" {
  557. // noop
  558. } else if req.Name != "" {
  559. req.Model = req.Name
  560. } else {
  561. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
  562. return
  563. }
  564. resp, err := GetModelInfo(req)
  565. if err != nil {
  566. if os.IsNotExist(err) {
  567. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
  568. } else {
  569. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  570. }
  571. return
  572. }
  573. c.JSON(http.StatusOK, resp)
  574. }
  575. func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
  576. model, err := GetModel(req.Model)
  577. if err != nil {
  578. return nil, err
  579. }
  580. modelDetails := api.ModelDetails{
  581. ParentModel: model.ParentModel,
  582. Format: model.Config.ModelFormat,
  583. Family: model.Config.ModelFamily,
  584. Families: model.Config.ModelFamilies,
  585. ParameterSize: model.Config.ModelType,
  586. QuantizationLevel: model.Config.FileType,
  587. }
  588. if req.System != "" {
  589. model.System = req.System
  590. }
  591. if req.Template != "" {
  592. model.Template = req.Template
  593. }
  594. msgs := make([]api.Message, 0)
  595. for _, msg := range model.Messages {
  596. msgs = append(msgs, api.Message{Role: msg.Role, Content: msg.Content})
  597. }
  598. resp := &api.ShowResponse{
  599. License: strings.Join(model.License, "\n"),
  600. System: model.System,
  601. Template: model.Template,
  602. Details: modelDetails,
  603. Messages: msgs,
  604. }
  605. var params []string
  606. cs := 30
  607. for k, v := range model.Options {
  608. switch val := v.(type) {
  609. case []interface{}:
  610. for _, nv := range val {
  611. params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
  612. }
  613. default:
  614. params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
  615. }
  616. }
  617. resp.Parameters = strings.Join(params, "\n")
  618. for k, v := range req.Options {
  619. if _, ok := req.Options[k]; ok {
  620. model.Options[k] = v
  621. }
  622. }
  623. mf, err := ShowModelfile(model)
  624. if err != nil {
  625. return nil, err
  626. }
  627. resp.Modelfile = mf
  628. return resp, nil
  629. }
  630. func ListModelsHandler(c *gin.Context) {
  631. models := make([]api.ModelResponse, 0)
  632. manifestsPath, err := GetManifestPath()
  633. if err != nil {
  634. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  635. return
  636. }
  637. modelResponse := func(modelName string) (api.ModelResponse, error) {
  638. model, err := GetModel(modelName)
  639. if err != nil {
  640. return api.ModelResponse{}, err
  641. }
  642. modelDetails := api.ModelDetails{
  643. Format: model.Config.ModelFormat,
  644. Family: model.Config.ModelFamily,
  645. Families: model.Config.ModelFamilies,
  646. ParameterSize: model.Config.ModelType,
  647. QuantizationLevel: model.Config.FileType,
  648. }
  649. return api.ModelResponse{
  650. Model: model.ShortName,
  651. Name: model.ShortName,
  652. Size: model.Size,
  653. Digest: model.Digest,
  654. Details: modelDetails,
  655. }, nil
  656. }
  657. walkFunc := func(path string, info os.FileInfo, _ error) error {
  658. if !info.IsDir() {
  659. path, tag := filepath.Split(path)
  660. model := strings.Trim(strings.TrimPrefix(path, manifestsPath), string(os.PathSeparator))
  661. modelPath := strings.Join([]string{model, tag}, ":")
  662. canonicalModelPath := strings.ReplaceAll(modelPath, string(os.PathSeparator), "/")
  663. resp, err := modelResponse(canonicalModelPath)
  664. if err != nil {
  665. slog.Info(fmt.Sprintf("skipping file: %s", canonicalModelPath))
  666. // nolint: nilerr
  667. return nil
  668. }
  669. resp.ModifiedAt = info.ModTime()
  670. models = append(models, resp)
  671. }
  672. return nil
  673. }
  674. if err := filepath.Walk(manifestsPath, walkFunc); err != nil {
  675. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  676. return
  677. }
  678. c.JSON(http.StatusOK, api.ListResponse{Models: models})
  679. }
  680. func CopyModelHandler(c *gin.Context) {
  681. var req api.CopyRequest
  682. err := c.ShouldBindJSON(&req)
  683. switch {
  684. case errors.Is(err, io.EOF):
  685. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
  686. return
  687. case err != nil:
  688. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  689. return
  690. }
  691. if req.Source == "" || req.Destination == "" {
  692. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "source add destination are required"})
  693. return
  694. }
  695. if err := ParseModelPath(req.Destination).Validate(); err != nil {
  696. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  697. return
  698. }
  699. if err := CopyModel(req.Source, req.Destination); err != nil {
  700. if os.IsNotExist(err) {
  701. c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Source)})
  702. } else {
  703. c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  704. }
  705. return
  706. }
  707. }
  708. func HeadBlobHandler(c *gin.Context) {
  709. path, err := GetBlobsPath(c.Param("digest"))
  710. if err != nil {
  711. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
  712. return
  713. }
  714. if _, err := os.Stat(path); err != nil {
  715. c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
  716. return
  717. }
  718. c.Status(http.StatusOK)
  719. }
  720. func CreateBlobHandler(c *gin.Context) {
  721. layer, err := NewLayer(c.Request.Body, "")
  722. if err != nil {
  723. c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  724. return
  725. }
  726. if layer.Digest != c.Param("digest") {
  727. c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
  728. return
  729. }
  730. if _, err := layer.Commit(); err != nil {
  731. c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
  732. return
  733. }
  734. c.Status(http.StatusCreated)
  735. }
// defaultAllowOrigins lists hosts that are always permitted as CORS origins
// (over http/https on any port), in addition to any set via OLLAMA_ORIGINS.
var defaultAllowOrigins = []string{
	"localhost",
	"127.0.0.1",
	"0.0.0.0",
}
  741. func NewServer() (*Server, error) {
  742. workDir, err := os.MkdirTemp("", "ollama")
  743. if err != nil {
  744. return nil, err
  745. }
  746. return &Server{
  747. WorkDir: workDir,
  748. }, nil
  749. }
// GenerateRoutes wires up the HTTP API: CORS configuration, a middleware
// that exposes the server's work directory to handlers, and all endpoint
// registrations.
func (s *Server) GenerateRoutes() http.Handler {
	// extra allowed origins may be supplied as a comma-separated list
	var origins []string
	if o := os.Getenv("OLLAMA_ORIGINS"); o != "" {
		origins = strings.Split(o, ",")
	}

	config := cors.DefaultConfig()
	config.AllowWildcard = true
	config.AllowBrowserExtensions = true

	// always allow the default local origins over http/https on any port
	config.AllowOrigins = origins
	for _, allowOrigin := range defaultAllowOrigins {
		config.AllowOrigins = append(config.AllowOrigins,
			fmt.Sprintf("http://%s", allowOrigin),
			fmt.Sprintf("https://%s", allowOrigin),
			fmt.Sprintf("http://%s:*", allowOrigin),
			fmt.Sprintf("https://%s:*", allowOrigin),
		)
	}

	r := gin.Default()
	r.Use(
		cors.New(config),
		func(c *gin.Context) {
			// expose the scratch directory to handlers via the context
			c.Set("workDir", s.WorkDir)
			c.Next()
		},
	)

	r.POST("/api/pull", PullModelHandler)
	r.POST("/api/generate", GenerateHandler)
	r.POST("/api/chat", ChatHandler)
	r.POST("/api/embeddings", EmbeddingHandler)
	r.POST("/api/create", CreateModelHandler)
	r.POST("/api/push", PushModelHandler)
	r.POST("/api/copy", CopyModelHandler)
	r.DELETE("/api/delete", DeleteModelHandler)
	r.POST("/api/show", ShowModelHandler)
	r.POST("/api/blobs/:digest", CreateBlobHandler)
	r.HEAD("/api/blobs/:digest", HeadBlobHandler)

	// health, tags and version respond to both GET and HEAD
	for _, method := range []string{http.MethodGet, http.MethodHead} {
		r.Handle(method, "/", func(c *gin.Context) {
			c.String(http.StatusOK, "Ollama is running")
		})
		r.Handle(method, "/api/tags", ListModelsHandler)
		r.Handle(method, "/api/version", func(c *gin.Context) {
			c.JSON(http.StatusOK, gin.H{"version": version.Version})
		})
	}

	return r
}
  797. func Serve(ln net.Listener) error {
  798. level := slog.LevelInfo
  799. if debug := os.Getenv("OLLAMA_DEBUG"); debug != "" {
  800. level = slog.LevelDebug
  801. }
  802. handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
  803. Level: level,
  804. AddSource: true,
  805. ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
  806. if attr.Key == slog.SourceKey {
  807. source := attr.Value.Any().(*slog.Source)
  808. source.File = filepath.Base(source.File)
  809. }
  810. return attr
  811. },
  812. })
  813. slog.SetDefault(slog.New(handler))
  814. if noprune := os.Getenv("OLLAMA_NOPRUNE"); noprune == "" {
  815. // clean up unused layers and manifests
  816. if err := PruneLayers(); err != nil {
  817. return err
  818. }
  819. manifestsPath, err := GetManifestPath()
  820. if err != nil {
  821. return err
  822. }
  823. if err := PruneDirectory(manifestsPath); err != nil {
  824. return err
  825. }
  826. }
  827. s, err := NewServer()
  828. if err != nil {
  829. return err
  830. }
  831. r := s.GenerateRoutes()
  832. slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
  833. srvr := &http.Server{
  834. Handler: r,
  835. }
  836. // listen for a ctrl+c and stop any loaded llm
  837. signals := make(chan os.Signal, 1)
  838. signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
  839. go func() {
  840. <-signals
  841. if loaded.runner != nil {
  842. loaded.runner.Close()
  843. }
  844. os.RemoveAll(s.WorkDir)
  845. os.Exit(0)
  846. }()
  847. if err := llm.Init(s.WorkDir); err != nil {
  848. return fmt.Errorf("unable to initialize llm library %w", err)
  849. }
  850. if runtime.GOOS == "linux" { // TODO - windows too
  851. // check compatibility to log warnings
  852. if _, err := gpu.CheckVRAM(); err != nil {
  853. slog.Info(err.Error())
  854. }
  855. }
  856. return srvr.Serve(ln)
  857. }
  858. func waitForStream(c *gin.Context, ch chan interface{}) {
  859. c.Header("Content-Type", "application/json")
  860. for resp := range ch {
  861. switch r := resp.(type) {
  862. case api.ProgressResponse:
  863. if r.Status == "success" {
  864. c.JSON(http.StatusOK, r)
  865. return
  866. }
  867. case gin.H:
  868. if errorMsg, ok := r["error"].(string); ok {
  869. c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
  870. return
  871. } else {
  872. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in progress response"})
  873. return
  874. }
  875. default:
  876. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
  877. return
  878. }
  879. }
  880. c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
  881. }
  882. func streamResponse(c *gin.Context, ch chan any) {
  883. c.Header("Content-Type", "application/x-ndjson")
  884. c.Stream(func(w io.Writer) bool {
  885. val, ok := <-ch
  886. if !ok {
  887. return false
  888. }
  889. bts, err := json.Marshal(val)
  890. if err != nil {
  891. slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
  892. return false
  893. }
  894. // Delineate chunks with new-line delimiter
  895. bts = append(bts, '\n')
  896. if _, err := w.Write(bts); err != nil {
  897. slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
  898. return false
  899. }
  900. return true
  901. })
  902. }
// ChatHandler implements POST /api/chat. It loads the requested model,
// templates the chat history into a prompt that fits the context window,
// and either streams api.ChatResponse chunks as NDJSON or, when
// stream=false, accumulates them into one response.
//
// NOTE(review): loaded.mu is held for the entire request, including
// prediction, so chat requests are serialized against each other.
func ChatHandler(c *gin.Context) {
	loaded.mu.Lock()
	defer loaded.mu.Unlock()
	// start of total-duration measurement, reported on the final chunk
	checkpointStart := time.Now()
	var req api.ChatRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// validate the request
	switch {
	case req.Model == "":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	case len(req.Format) > 0 && req.Format != "json":
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be json"})
		return
	}
	model, err := GetModel(req.Model)
	if err != nil {
		// a path error indicates the model manifest is missing on disk -> 404
		var pErr *fs.PathError
		if errors.As(err, &pErr) {
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found, try pulling it first", req.Model)})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	opts, err := modelOptions(model, req.Options)
	if err != nil {
		if errors.Is(err, api.ErrInvalidOpts) {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	// how long to keep the model resident after this request
	var sessionDuration time.Duration
	if req.KeepAlive == nil {
		sessionDuration = defaultSessionDuration
	} else {
		sessionDuration = req.KeepAlive.Duration
	}
	if err := load(c, model, opts, sessionDuration); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	// an empty request loads the model
	if len(req.Messages) == 0 {
		resp := api.ChatResponse{
			CreatedAt: time.Now().UTC(),
			Model:     req.Model,
			Done:      true,
			Message:   api.Message{Role: "assistant"},
		}
		c.JSON(http.StatusOK, resp)
		return
	}
	// load is complete; remaining time is prompt templating + prediction
	checkpointLoaded := time.Now()
	chat, err := model.ChatPrompts(req.Messages)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// trim history so the rendered prompt fits within the context window
	prompt, err := trimmedPrompt(c.Request.Context(), chat, model)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	slog.Debug(fmt.Sprintf("prompt: %s", prompt))
	// producer goroutine sends api.ChatResponse chunks (or a gin.H error)
	// on ch and closes it when prediction finishes
	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(r llm.PredictResult) {
			// Update model expiration
			loaded.expireAt = time.Now().Add(sessionDuration)
			loaded.expireTimer.Reset(sessionDuration)
			resp := api.ChatResponse{
				Model:     req.Model,
				CreatedAt: time.Now().UTC(),
				Message:   api.Message{Role: "assistant", Content: r.Content},
				Done:      r.Done,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}
			// timing totals are attached to the final chunk only
			if r.Done {
				resp.TotalDuration = time.Since(checkpointStart)
				resp.LoadDuration = checkpointLoaded.Sub(checkpointStart)
			}
			ch <- resp
		}
		// Start prediction
		predictReq := llm.PredictOpts{
			Prompt:  prompt,
			Format:  req.Format,
			Images:  chat.CurrentImages,
			Options: opts,
		}
		if err := loaded.runner.Predict(c.Request.Context(), predictReq, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()
	// non-streaming mode: drain the channel and reply once
	if req.Stream != nil && !*req.Stream {
		// Accumulate responses into the final response
		var final api.ChatResponse
		var sb strings.Builder
		for resp := range ch {
			switch r := resp.(type) {
			case api.ChatResponse:
				sb.WriteString(r.Message.Content)
				final = r
			case gin.H:
				if errorMsg, ok := r["error"].(string); ok {
					c.JSON(http.StatusInternalServerError, gin.H{"error": errorMsg})
					return
				} else {
					c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error format in response"})
					return
				}
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected error"})
				return
			}
		}
		// final carries the metrics of the last chunk; replace its message
		// with the full concatenated content
		final.Message = api.Message{Role: "assistant", Content: sb.String()}
		c.JSON(http.StatusOK, final)
		return
	}
	streamResponse(c, ch)
}
// promptInfo stores the variables used to template a prompt, and the token length of the resulting template for some model
type promptInfo struct {
	vars     PromptVars // template inputs; System and First are adjusted by the trimming logic
	tokenLen int        // encoded token length of the rendered prompt, per the loaded runner
}
  1047. // trimmedPrompt builds a prompt to send to a running model. It ensures the prompt fits within the max context length,
  1048. // while preserving the most recent system message.
  1049. func trimmedPrompt(ctx context.Context, chat *ChatHistory, model *Model) (string, error) {
  1050. if len(chat.Prompts) == 0 {
  1051. return "", nil
  1052. }
  1053. var promptsToAdd []promptInfo
  1054. var totalTokenLength int
  1055. var systemPromptIncluded bool
  1056. // reverse iterate through the prompts to build the prompt string in a way that fits the max context length
  1057. for i := len(chat.Prompts) - 1; i >= 0; i-- {
  1058. promptText, err := promptString(model, chat.Prompts[i], i == len(chat.Prompts)-1)
  1059. if err != nil {
  1060. return "", err
  1061. }
  1062. encodedTokens, err := loaded.runner.Encode(ctx, promptText)
  1063. if err != nil {
  1064. return "", err
  1065. }
  1066. if totalTokenLength+len(encodedTokens) > loaded.NumCtx && i != len(chat.Prompts)-1 {
  1067. break // reached max context length, stop adding more prompts
  1068. }
  1069. totalTokenLength += len(encodedTokens)
  1070. systemPromptIncluded = systemPromptIncluded || chat.Prompts[i].System != ""
  1071. promptsToAdd = append(promptsToAdd, promptInfo{vars: chat.Prompts[i], tokenLen: len(encodedTokens)})
  1072. }
  1073. // ensure the system prompt is included, if not already
  1074. if chat.LastSystem != "" && !systemPromptIncluded {
  1075. var err error
  1076. promptsToAdd, err = includeSystemPrompt(ctx, chat.LastSystem, totalTokenLength, promptsToAdd)
  1077. if err != nil {
  1078. return "", err
  1079. }
  1080. }
  1081. promptsToAdd[len(promptsToAdd)-1].vars.First = true
  1082. // construct the final prompt string from the prompts which fit within the context window
  1083. var result string
  1084. for i, prompt := range promptsToAdd {
  1085. promptText, err := promptString(model, prompt.vars, i == 0)
  1086. if err != nil {
  1087. return "", err
  1088. }
  1089. result = promptText + result
  1090. }
  1091. return result, nil
  1092. }
  1093. // promptString applies the model template to the prompt
  1094. func promptString(model *Model, vars PromptVars, isMostRecent bool) (string, error) {
  1095. if isMostRecent {
  1096. p, err := model.PreResponsePrompt(vars)
  1097. if err != nil {
  1098. return "", fmt.Errorf("pre-response template: %w", err)
  1099. }
  1100. return p, nil
  1101. }
  1102. p, err := Prompt(model.Template, vars)
  1103. if err != nil {
  1104. return "", err
  1105. }
  1106. return p, nil
  1107. }
  1108. // includeSystemPrompt adjusts the prompts to include the system prompt.
  1109. func includeSystemPrompt(ctx context.Context, systemPrompt string, totalTokenLength int, promptsToAdd []promptInfo) ([]promptInfo, error) {
  1110. systemTokens, err := loaded.runner.Encode(ctx, systemPrompt)
  1111. if err != nil {
  1112. return nil, err
  1113. }
  1114. for i := len(promptsToAdd) - 1; i >= 0; i-- {
  1115. if totalTokenLength+len(systemTokens) <= loaded.NumCtx {
  1116. promptsToAdd[i].vars.System = systemPrompt
  1117. return promptsToAdd[:i+1], nil
  1118. }
  1119. totalTokenLength -= promptsToAdd[i].tokenLen
  1120. }
  1121. // if got here, system did not fit anywhere, so return the most recent prompt with the system message set
  1122. recent := promptsToAdd[len(promptsToAdd)-1]
  1123. recent.vars.System = systemPrompt
  1124. return []promptInfo{recent}, nil
  1125. }