routes.go

package server

import (
	"bytes"
	"cmp"
	"context"
	"encoding/binary"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/fs"
	"log/slog"
	"math"
	"net"
	"net/http"
	"net/netip"
	"os"
	"os/signal"
	"path/filepath"
	"slices"
	"strings"
	"syscall"
	"time"

	"github.com/gin-contrib/cors"
	"github.com/gin-gonic/gin"
	"golang.org/x/sync/errgroup"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/discover"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/llama"
	"github.com/ollama/ollama/llm"
	"github.com/ollama/ollama/openai"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/runners"
	"github.com/ollama/ollama/server/imageproc"
	"github.com/ollama/ollama/template"
	"github.com/ollama/ollama/types/errtypes"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/version"
)

var mode string = gin.DebugMode

type Server struct {
	addr  net.Addr
	sched *Scheduler
}

func init() {
	switch mode {
	case gin.DebugMode:
	case gin.ReleaseMode:
	case gin.TestMode:
	default:
		mode = gin.DebugMode
	}

	gin.SetMode(mode)
}

var (
	errRequired    = errors.New("is required")
	errBadTemplate = errors.New("template error")
)

func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
	opts := api.DefaultOptions()
	if err := opts.FromMap(model.Options); err != nil {
		return api.Options{}, err
	}

	if err := opts.FromMap(requestOpts); err != nil {
		return api.Options{}, err
	}

	return opts, nil
}

// scheduleRunner schedules a runner after validating inputs such as capabilities and model options.
// It returns the allocated runner, model instance, and consolidated options if successful and error otherwise.
func (s *Server) scheduleRunner(ctx context.Context, name string, caps []Capability, requestOpts map[string]any, keepAlive *api.Duration) (llm.LlamaServer, *Model, *api.Options, error) {
	if name == "" {
		return nil, nil, nil, fmt.Errorf("model %w", errRequired)
	}

	model, err := GetModel(name)
	if err != nil {
		return nil, nil, nil, err
	}

	if err := model.CheckCapabilities(caps...); err != nil {
		return nil, nil, nil, fmt.Errorf("%s %w", name, err)
	}

	opts, err := modelOptions(model, requestOpts)
	if err != nil {
		return nil, nil, nil, err
	}

	runnerCh, errCh := s.sched.GetRunner(ctx, model, opts, keepAlive)
	var runner *runnerRef
	select {
	case runner = <-runnerCh:
	case err = <-errCh:
		return nil, nil, nil, err
	}

	return runner.llama, model, &opts, nil
}
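
// GenerateHandler handles POST /api/generate: it resolves the requested model, renders
// the prompt from the template, system message, images, and optional suffix, then
// streams completion chunks back to the client unless streaming is disabled.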
func (s *Server) GenerateHandler(c *gin.Context) {
	checkpointStart := time.Now()
	var req api.GenerateRequest
	if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	name := model.ParseName(req.Model)
	if !name.IsValid() {
		// Ideally this is "invalid model name" but we're keeping with
		// what the API currently returns until we can change it.
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
		return
	}
	// We cannot currently consolidate this into GetModel because doing so
	// would induce infinite recursion given the current code structure.
	name, err := getExistingName(name)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
		return
	}

	model, err := GetModel(name.String())
	if err != nil {
		switch {
		case errors.Is(err, fs.ErrNotExist):
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
		case err.Error() == "invalid model name":
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		default:
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		}
		return
	}

	// expire the runner
	if req.Prompt == "" && req.KeepAlive != nil && int(req.KeepAlive.Seconds()) == 0 {
		s.sched.expireRunner(model)

		c.JSON(http.StatusOK, api.GenerateResponse{
			Model:      req.Model,
			CreatedAt:  time.Now().UTC(),
			Response:   "",
			Done:       true,
			DoneReason: "unload",
		})
		return
	}

	if req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
		return
	}

	caps := []Capability{CapabilityCompletion}
	if req.Suffix != "" {
		caps = append(caps, CapabilityInsert)
	}

	r, m, opts, err := s.scheduleRunner(c.Request.Context(), name.String(), caps, req.Options, req.KeepAlive)
	if errors.Is(err, errCapabilityCompletion) {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support generate", req.Model)})
		return
	} else if err != nil {
		handleScheduleError(c, req.Model, err)
		return
	}

	checkpointLoaded := time.Now()

	// load the model
	if req.Prompt == "" {
		c.JSON(http.StatusOK, api.GenerateResponse{
			Model:      req.Model,
			CreatedAt:  time.Now().UTC(),
			Done:       true,
			DoneReason: "load",
		})
		return
	}

	isMllama := checkMllamaModelFamily(model)
	if isMllama && len(req.Images) > 1 {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "this model only supports one image: more than one image sent"})
		return
	}

	images := make([]llm.ImageData, len(req.Images))
	for i := range req.Images {
		if isMllama {
			data, aspectRatioID, err := imageproc.Preprocess(req.Images[i])
			if err != nil {
				c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "error processing image"})
				return
			}

			buf := new(bytes.Buffer)
			err = binary.Write(buf, binary.LittleEndian, data)
			if err != nil {
				c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "error processing image"})
				return
			}

			images[i] = llm.ImageData{ID: i, Data: buf.Bytes(), AspectRatioID: aspectRatioID}
		} else {
			images[i] = llm.ImageData{ID: i, Data: req.Images[i]}
		}
	}

	prompt := req.Prompt
	if !req.Raw {
		tmpl := m.Template
		if req.Template != "" {
			tmpl, err = template.Parse(req.Template)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}
		}

		var values template.Values
		if req.Suffix != "" {
			values.Prompt = prompt
			values.Suffix = req.Suffix
		} else {
			var msgs []api.Message
			if req.System != "" {
				msgs = append(msgs, api.Message{Role: "system", Content: req.System})
			} else if m.System != "" {
				msgs = append(msgs, api.Message{Role: "system", Content: m.System})
			}

			if req.Context == nil {
				msgs = append(msgs, m.Messages...)
			}

			for _, i := range images {
				imgPrompt := ""
				if isMllama {
					imgPrompt = "<|image|>"
				}
				msgs = append(msgs, api.Message{Role: "user", Content: fmt.Sprintf("[img-%d]"+imgPrompt, i.ID)})
			}

			values.Messages = append(msgs, api.Message{Role: "user", Content: req.Prompt})
		}

		var b bytes.Buffer
		if req.Context != nil {
			slog.Warn("the context field is deprecated and will be removed in a future version of Ollama")
			s, err := r.Detokenize(c.Request.Context(), req.Context)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}
			b.WriteString(s)
		}

		if err := tmpl.Execute(&b, values); err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		prompt = b.String()
	}

	slog.Debug("generate request", "images", len(images), "prompt", prompt)

	ch := make(chan any)
	go func() {
		// TODO (jmorganca): avoid building the response twice both here and below
		var sb strings.Builder
		defer close(ch)
		if err := r.Completion(c.Request.Context(), llm.CompletionRequest{
			Prompt:  prompt,
			Images:  images,
			Format:  req.Format,
			Options: opts,
		}, func(cr llm.CompletionResponse) {
			res := api.GenerateResponse{
				Model:      req.Model,
				CreatedAt:  time.Now().UTC(),
				Response:   cr.Content,
				Done:       cr.Done,
				DoneReason: cr.DoneReason,
				Metrics: api.Metrics{
					PromptEvalCount:    cr.PromptEvalCount,
					PromptEvalDuration: cr.PromptEvalDuration,
					EvalCount:          cr.EvalCount,
					EvalDuration:       cr.EvalDuration,
				},
			}

			if _, err := sb.WriteString(cr.Content); err != nil {
				ch <- gin.H{"error": err.Error()}
			}

			if cr.Done {
				res.TotalDuration = time.Since(checkpointStart)
				res.LoadDuration = checkpointLoaded.Sub(checkpointStart)

				if !req.Raw {
					tokens, err := r.Tokenize(c.Request.Context(), prompt+sb.String())
					if err != nil {
						ch <- gin.H{"error": err.Error()}
						return
					}
					res.Context = tokens
				}
			}

			ch <- res
		}); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		var r api.GenerateResponse
		var sb strings.Builder
		for rr := range ch {
			switch t := rr.(type) {
			case api.GenerateResponse:
				sb.WriteString(t.Response)
				r = t
			case gin.H:
				msg, ok := t["error"].(string)
				if !ok {
					msg = "unexpected error format in response"
				}

				c.JSON(http.StatusInternalServerError, gin.H{"error": msg})
				return
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected response"})
				return
			}
		}

		r.Response = sb.String()
		c.JSON(http.StatusOK, r)
		return
	}

	streamResponse(c, ch)
}
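
// EmbedHandler handles POST /api/embed: it tokenizes each input (truncating to the
// context length unless truncation is disabled) and computes normalized embeddings
// for all inputs concurrently.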
func (s *Server) EmbedHandler(c *gin.Context) {
	checkpointStart := time.Now()
	var req api.EmbedRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	truncate := true
	if req.Truncate != nil && !*req.Truncate {
		truncate = false
	}

	var input []string
	switch i := req.Input.(type) {
	case string:
		if len(i) > 0 {
			input = append(input, i)
		}
	case []any:
		for _, v := range i {
			if _, ok := v.(string); !ok {
				c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid input type"})
				return
			}
			input = append(input, v.(string))
		}
	default:
		if req.Input != nil {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid input type"})
			return
		}
	}

	name, err := getExistingName(model.ParseName(req.Model))
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
		return
	}

	r, m, opts, err := s.scheduleRunner(c.Request.Context(), name.String(), []Capability{}, req.Options, req.KeepAlive)
	if err != nil {
		handleScheduleError(c, req.Model, err)
		return
	}

	checkpointLoaded := time.Now()

	if len(input) == 0 {
		c.JSON(http.StatusOK, api.EmbedResponse{Model: req.Model, Embeddings: [][]float32{}})
		return
	}

	kvData, err := getKVData(m.ModelPath, false)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	var count int
	for i, s := range input {
		tokens, err := r.Tokenize(c.Request.Context(), s)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		ctxLen := min(opts.NumCtx, int(kvData.ContextLength()))
		if len(tokens) > ctxLen {
			if !truncate {
				c.JSON(http.StatusBadRequest, gin.H{"error": "input length exceeds maximum context length"})
				return
			}

			tokens = tokens[:ctxLen]
			s, err = r.Detokenize(c.Request.Context(), tokens)
			if err != nil {
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
				return
			}
		}

		count += len(tokens)
		input[i] = s
	}

	var g errgroup.Group
	embeddings := make([][]float32, len(input))
	for i, text := range input {
		g.Go(func() error {
			embedding, err := r.Embedding(c.Request.Context(), text)
			if err != nil {
				return err
			}
			embeddings[i] = normalize(embedding)
			return nil
		})
	}

	if err := g.Wait(); err != nil {
		slog.Error("embedding generation failed", "error", err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to generate embeddings: %v", err)})
		return
	}

	resp := api.EmbedResponse{
		Model:           req.Model,
		Embeddings:      embeddings,
		TotalDuration:   time.Since(checkpointStart),
		LoadDuration:    checkpointLoaded.Sub(checkpointStart),
		PromptEvalCount: count,
	}
	c.JSON(http.StatusOK, resp)
}
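
// normalize scales vec to unit (L2) length in place; a zero vector stays all zeros.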
func normalize(vec []float32) []float32 {
	var sum float32
	for _, v := range vec {
		sum += v * v
	}

	norm := float32(0.0)
	if sum > 0 {
		norm = float32(1.0 / math.Sqrt(float64(sum)))
	}

	for i := range vec {
		vec[i] *= norm
	}
	return vec
}
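
// EmbeddingsHandler handles POST /api/embeddings, the legacy single-prompt embedding
// endpoint; an empty prompt only loads the model.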
func (s *Server) EmbeddingsHandler(c *gin.Context) {
	var req api.EmbeddingRequest
	if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	name := model.ParseName(req.Model)
	if !name.IsValid() {
		c.JSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	r, _, _, err := s.scheduleRunner(c.Request.Context(), name.String(), []Capability{}, req.Options, req.KeepAlive)
	if err != nil {
		handleScheduleError(c, req.Model, err)
		return
	}

	// an empty request loads the model
	if req.Prompt == "" {
		c.JSON(http.StatusOK, api.EmbeddingResponse{Embedding: []float64{}})
		return
	}

	embedding, err := r.Embedding(c.Request.Context(), req.Prompt)
	if err != nil {
		slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
		c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to generate embedding: %v", err)})
		return
	}

	var e []float64
	for _, v := range embedding {
		e = append(e, float64(v))
	}

	resp := api.EmbeddingResponse{
		Embedding: e,
	}
	c.JSON(http.StatusOK, resp)
}
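
// TokenizeHandler handles /api/tokenize (POST only): it loads only the model's
// vocabulary and returns the token IDs for the supplied text.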
func (s *Server) TokenizeHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}

	var req api.TokenizeRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		if errors.Is(err, io.EOF) {
			http.Error(w, "missing request body", http.StatusBadRequest)
			return
		}
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	if req.Text == "" {
		http.Error(w, "missing `text` for tokenization", http.StatusBadRequest)
		return
	}
	if req.Model == "" {
		http.Error(w, "missing `model` for tokenization", http.StatusBadRequest)
		return
	}

	name := model.ParseName(req.Model)
	if !name.IsValid() {
		http.Error(w, fmt.Sprintf("model name `%q` is invalid", req.Model), http.StatusBadRequest)
		return
	}

	name, err := getExistingName(name)
	if err != nil {
		http.Error(w, fmt.Sprintf("model `%s` not found", req.Model), http.StatusNotFound)
		return
	}

	// Get local model path
	modelPath, err := GetModel(name.String())
	if err != nil {
		http.Error(w, fmt.Sprintf("model `%s` not found", req.Model), http.StatusNotFound)
		return
	}

	model, err := llama.LoadModelFromFile(modelPath.ModelPath, llama.ModelParams{
		VocabOnly: true,
		UseMmap:   true,
	})
	if err != nil {
		http.Error(w, fmt.Sprintf("failed to load model: %v", err), http.StatusInternalServerError)
		return
	}
	defer llama.FreeModel(model)

	// Tokenize the text
	tokens, err := model.Tokenize(req.Text, false, true)
	if err != nil {
		http.Error(w, fmt.Sprintf("failed to tokenize text: %v", err), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(api.TokenizeResponse{
		Tokens: tokens,
	}); err != nil {
		http.Error(w, fmt.Sprintf("failed to encode response: %v", err), http.StatusInternalServerError)
		return
	}
}
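
// DetokenizeHandler handles /api/detokenize (POST only): it converts token IDs back
// into text using the model's vocabulary.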
func (s *Server) DetokenizeHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}

	var req api.DetokenizeRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		if errors.Is(err, io.EOF) {
			http.Error(w, "missing request body", http.StatusBadRequest)
			return
		}
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	if req.Tokens == nil {
		http.Error(w, "missing tokens for detokenization", http.StatusBadRequest)
		return
	}
	if req.Model == "" {
		http.Error(w, "missing `model` for detokenization", http.StatusBadRequest)
		return
	}

	name := model.ParseName(req.Model)
	if !name.IsValid() {
		http.Error(w, fmt.Sprintf("model name `%q` is invalid", req.Model), http.StatusBadRequest)
		return
	}

	name, err := getExistingName(name)
	if err != nil {
		http.Error(w, fmt.Sprintf("model `%s` not found", req.Model), http.StatusNotFound)
		return
	}

	// Get local model path
	modelPath, err := GetModel(name.String())
	if err != nil {
		http.Error(w, fmt.Sprintf("model `%s` not found", req.Model), http.StatusNotFound)
		return
	}

	model, err := llama.LoadModelFromFile(modelPath.ModelPath, llama.ModelParams{
		VocabOnly: true,
		UseMmap:   true,
	})
	if err != nil {
		http.Error(w, fmt.Sprintf("failed to load model: %v", err), http.StatusInternalServerError)
		return
	}
	defer llama.FreeModel(model)

	var text string
	for _, token := range req.Tokens {
		text += model.TokenToPiece(token)
	}

	w.Header().Set("Content-Type", "application/json")
	if err := json.NewEncoder(w).Encode(api.DetokenizeResponse{
		Text: text,
	}); err != nil {
		http.Error(w, fmt.Sprintf("failed to encode response: %v", err), http.StatusInternalServerError)
		return
	}
}
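
// PullHandler handles POST /api/pull: it downloads a model from the registry,
// streaming progress responses to the client.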
func (s *Server) PullHandler(c *gin.Context) {
	var req api.PullRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	name := model.ParseName(cmp.Or(req.Model, req.Name))
	if !name.IsValid() {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid model name"})
		return
	}

	name, err = getExistingName(name)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(r api.ProgressResponse) {
			ch <- r
		}

		regOpts := &registryOptions{
			Insecure: req.Insecure,
		}

		ctx, cancel := context.WithCancel(c.Request.Context())
		defer cancel()

		if err := PullModel(ctx, name.DisplayShortest(), regOpts, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		waitForStream(c, ch)
		return
	}

	streamResponse(c, ch)
}
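
// PushHandler handles POST /api/push: it uploads a local model to the registry,
// streaming progress responses to the client.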
func (s *Server) PushHandler(c *gin.Context) {
	var req api.PushRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	var mname string
	if req.Model != "" {
		mname = req.Model
	} else if req.Name != "" {
		mname = req.Name
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(r api.ProgressResponse) {
			ch <- r
		}

		regOpts := &registryOptions{
			Insecure: req.Insecure,
		}

		ctx, cancel := context.WithCancel(c.Request.Context())
		defer cancel()

		name, err := getExistingName(model.ParseName(mname))
		if err != nil {
			ch <- gin.H{"error": err.Error()}
			return
		}

		if err := PushModel(ctx, name.DisplayShortest(), regOpts, fn); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		waitForStream(c, ch)
		return
	}

	streamResponse(c, ch)
}

// getExistingName searches the models directory for the longest prefix match of
// the input name and returns the input name with all existing parts replaced
// with each part found. If no parts are found, the input name is returned as
// is.
func getExistingName(n model.Name) (model.Name, error) {
	var zero model.Name
	existing, err := Manifests(true)
	if err != nil {
		return zero, err
	}

	var set model.Name // tracks parts already canonicalized
	for e := range existing {
		if set.Host == "" && strings.EqualFold(e.Host, n.Host) {
			n.Host = e.Host
		}
		if set.Namespace == "" && strings.EqualFold(e.Namespace, n.Namespace) {
			n.Namespace = e.Namespace
		}
		if set.Model == "" && strings.EqualFold(e.Model, n.Model) {
			n.Model = e.Model
		}
		if set.Tag == "" && strings.EqualFold(e.Tag, n.Tag) {
			n.Tag = e.Tag
		}
	}
	return n, nil
}
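
// CreateHandler handles POST /api/create: it builds a model from a Modelfile supplied
// inline or by path, optionally quantizing it, and streams progress to the client.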
func (s *Server) CreateHandler(c *gin.Context) {
	var r api.CreateRequest
	if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	name := model.ParseName(cmp.Or(r.Model, r.Name))
	if !name.IsValid() {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": errtypes.InvalidModelNameErrMsg})
		return
	}

	name, err := getExistingName(name)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if r.Path == "" && r.Modelfile == "" {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or Modelfile are required"})
		return
	}

	var sr io.Reader = strings.NewReader(r.Modelfile)
	if r.Path != "" && r.Modelfile == "" {
		f, err := os.Open(r.Path)
		if err != nil {
			c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
			return
		}
		defer f.Close()
		sr = f
	}

	f, err := parser.ParseFile(sr)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	ch := make(chan any)
	go func() {
		defer close(ch)
		fn := func(resp api.ProgressResponse) {
			ch <- resp
		}

		ctx, cancel := context.WithCancel(c.Request.Context())
		defer cancel()

		quantization := cmp.Or(r.Quantize, r.Quantization)
		if err := CreateModel(ctx, name, filepath.Dir(r.Path), strings.ToUpper(quantization), f, fn); errors.Is(err, errBadTemplate) {
			ch <- gin.H{"error": err.Error(), "status": http.StatusBadRequest}
		} else if err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if r.Stream != nil && !*r.Stream {
		waitForStream(c, ch)
		return
	}

	streamResponse(c, ch)
}
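
// DeleteHandler handles DELETE /api/delete: it removes the model's manifest and then
// its layers.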
func (s *Server) DeleteHandler(c *gin.Context) {
	var r api.DeleteRequest
	if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	n := model.ParseName(cmp.Or(r.Model, r.Name))
	if !n.IsValid() {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("name %q is invalid", cmp.Or(r.Model, r.Name))})
		return
	}

	n, err := getExistingName(n)
	if err != nil {
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", cmp.Or(r.Model, r.Name))})
		return
	}

	m, err := ParseNamedManifest(n)
	if err != nil {
		switch {
		case os.IsNotExist(err):
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", cmp.Or(r.Model, r.Name))})
		default:
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		}
		return
	}

	if err := m.Remove(); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	if err := m.RemoveLayers(); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
}
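
// ShowHandler handles POST /api/show: it returns model details such as license,
// template, parameters, and metadata.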
func (s *Server) ShowHandler(c *gin.Context) {
	var req api.ShowRequest
	err := c.ShouldBindJSON(&req)
	switch {
	case errors.Is(err, io.EOF):
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	case err != nil:
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if req.Model != "" {
		// noop
	} else if req.Name != "" {
		req.Model = req.Name
	} else {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	resp, err := GetModelInfo(req)
	if err != nil {
		switch {
		case os.IsNotExist(err):
			c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
		case err.Error() == "invalid model name":
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		default:
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		}
		return
	}

	c.JSON(http.StatusOK, resp)
}
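
// GetModelInfo assembles the ShowResponse for a model, including its details,
// parameters, a generated Modelfile, and the model file's key/value metadata.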
func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
	name := model.ParseName(req.Model)
	if !name.IsValid() {
		return nil, errModelPathInvalid
	}

	name, err := getExistingName(name)
	if err != nil {
		return nil, err
	}

	m, err := GetModel(name.String())
	if err != nil {
		return nil, err
	}

	modelDetails := api.ModelDetails{
		ParentModel:       m.ParentModel,
		Format:            m.Config.ModelFormat,
		Family:            m.Config.ModelFamily,
		Families:          m.Config.ModelFamilies,
		ParameterSize:     m.Config.ModelType,
		QuantizationLevel: m.Config.FileType,
	}

	if req.System != "" {
		m.System = req.System
	}

	msgs := make([]api.Message, len(m.Messages))
	for i, msg := range m.Messages {
		msgs[i] = api.Message{Role: msg.Role, Content: msg.Content}
	}

	manifest, err := ParseNamedManifest(name)
	if err != nil {
		return nil, err
	}

	resp := &api.ShowResponse{
		License:    strings.Join(m.License, "\n"),
		System:     m.System,
		Template:   m.Template.String(),
		Details:    modelDetails,
		Messages:   msgs,
		ModifiedAt: manifest.fi.ModTime(),
	}

	var params []string
	cs := 30
	for k, v := range m.Options {
		switch val := v.(type) {
		case []interface{}:
			for _, nv := range val {
				params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
			}
		default:
			params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
		}
	}
	resp.Parameters = strings.Join(params, "\n")

	for k, v := range req.Options {
		if _, ok := req.Options[k]; ok {
			m.Options[k] = v
		}
	}

	var sb strings.Builder
	fmt.Fprintln(&sb, "# Modelfile generated by \"ollama show\"")
	fmt.Fprintln(&sb, "# To build a new Modelfile based on this, replace FROM with:")
	fmt.Fprintf(&sb, "# FROM %s\n\n", m.ShortName)
	fmt.Fprint(&sb, m.String())
	resp.Modelfile = sb.String()

	kvData, err := getKVData(m.ModelPath, req.Verbose)
	if err != nil {
		return nil, err
	}
	delete(kvData, "general.name")
	delete(kvData, "tokenizer.chat_template")
	resp.ModelInfo = kvData

	if len(m.ProjectorPaths) > 0 {
		projectorData, err := getKVData(m.ProjectorPaths[0], req.Verbose)
		if err != nil {
			return nil, err
		}
		resp.ProjectorInfo = projectorData
	}

	return resp, nil
}
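
// getKVData loads key/value metadata from a model file; unless verbose is set,
// arrays longer than five elements are replaced with empty ones.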
func getKVData(digest string, verbose bool) (llm.KV, error) {
	maxArraySize := 0
	if verbose {
		maxArraySize = -1
	}
	kvData, err := llm.LoadModel(digest, maxArraySize)
	if err != nil {
		return nil, err
	}

	kv := kvData.KV()

	if !verbose {
		for k := range kv {
			if t, ok := kv[k].([]any); ok && len(t) > 5 {
				kv[k] = []any{}
			}
		}
	}

	return kv, nil
}
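
// ListHandler handles GET /api/tags: it lists local models, most recently modified first.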
func (s *Server) ListHandler(c *gin.Context) {
	ms, err := Manifests(true)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	models := []api.ListModelResponse{}
	for n, m := range ms {
		var cf ConfigV2

		if m.Config.Digest != "" {
			f, err := m.Config.Open()
			if err != nil {
				slog.Warn("bad manifest filepath", "name", n, "error", err)
				continue
			}
			defer f.Close()

			if err := json.NewDecoder(f).Decode(&cf); err != nil {
				slog.Warn("bad manifest config", "name", n, "error", err)
				continue
			}
		}

		// tag should never be masked
		models = append(models, api.ListModelResponse{
			Model:      n.DisplayShortest(),
			Name:       n.DisplayShortest(),
			Size:       m.Size(),
			Digest:     m.digest,
			ModifiedAt: m.fi.ModTime(),
			Details: api.ModelDetails{
				Format:            cf.ModelFormat,
				Family:            cf.ModelFamily,
				Families:          cf.ModelFamilies,
				ParameterSize:     cf.ModelType,
				QuantizationLevel: cf.FileType,
			},
		})
	}

	slices.SortStableFunc(models, func(i, j api.ListModelResponse) int {
		// most recently modified first
		return cmp.Compare(j.ModifiedAt.Unix(), i.ModifiedAt.Unix())
	})

	c.JSON(http.StatusOK, api.ListResponse{Models: models})
}
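
// CopyHandler handles POST /api/copy: it copies an existing model to a new name.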
func (s *Server) CopyHandler(c *gin.Context) {
	var r api.CopyRequest
	if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	src := model.ParseName(r.Source)
	if !src.IsValid() {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("source %q is invalid", r.Source)})
		return
	}

	src, err := getExistingName(src)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	dst := model.ParseName(r.Destination)
	if !dst.IsValid() {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("destination %q is invalid", r.Destination)})
		return
	}

	dst, err = getExistingName(dst)
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if err := CopyModel(src, dst); errors.Is(err, os.ErrNotExist) {
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model %q not found", r.Source)})
	} else if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
	}
}
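
// HeadBlobHandler handles HEAD /api/blobs/:digest: it reports whether a blob exists
// in the local blob store.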
func (s *Server) HeadBlobHandler(c *gin.Context) {
	path, err := GetBlobsPath(c.Param("digest"))
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	if _, err := os.Stat(path); err != nil {
		c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
		return
	}

	c.Status(http.StatusOK)
}
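
// CreateBlobHandler handles POST /api/blobs/:digest: it stores the request body as a
// layer, verifying that its digest matches the URL parameter.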
func (s *Server) CreateBlobHandler(c *gin.Context) {
	if ib, ok := intermediateBlobs[c.Param("digest")]; ok {
		p, err := GetBlobsPath(ib)
		if err != nil {
			c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}

		if _, err := os.Stat(p); errors.Is(err, os.ErrNotExist) {
			slog.Info("evicting intermediate blob which no longer exists", "digest", ib)
			delete(intermediateBlobs, c.Param("digest"))
		} else if err != nil {
			c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		} else {
			c.Status(http.StatusOK)
			return
		}
	}

	path, err := GetBlobsPath(c.Param("digest"))
	if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	_, err = os.Stat(path)
	switch {
	case errors.Is(err, os.ErrNotExist):
		// noop
	case err != nil:
		c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	default:
		c.Status(http.StatusOK)
		return
	}

	layer, err := NewLayer(c.Request.Body, "")
	if err != nil {
		c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	if layer.Digest != c.Param("digest") {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
		return
	}

	c.Status(http.StatusCreated)
}
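
// isLocalIP reports whether ip is bound to one of this machine's network interfaces.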
func isLocalIP(ip netip.Addr) bool {
	if interfaces, err := net.Interfaces(); err == nil {
		for _, iface := range interfaces {
			addrs, err := iface.Addrs()
			if err != nil {
				continue
			}

			for _, a := range addrs {
				if parsed, _, err := net.ParseCIDR(a.String()); err == nil {
					if parsed.String() == ip.String() {
						return true
					}
				}
			}
		}
	}

	return false
}
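
// allowedHost reports whether host refers to this machine: empty, localhost, the
// system hostname, or a local TLD such as .local or .internal.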
func allowedHost(host string) bool {
	host = strings.ToLower(host)

	if host == "" || host == "localhost" {
		return true
	}

	if hostname, err := os.Hostname(); err == nil && host == strings.ToLower(hostname) {
		return true
	}

	tlds := []string{
		"localhost",
		"local",
		"internal",
	}

	// check if the host is a local TLD
	for _, tld := range tlds {
		if strings.HasSuffix(host, "."+tld) {
			return true
		}
	}

	return false
}
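
// allowedHostsMiddleware rejects requests whose Host header does not look local when
// the server is bound to a loopback address; requests to non-loopback binds pass through.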
func allowedHostsMiddleware(addr net.Addr) gin.HandlerFunc {
	return func(c *gin.Context) {
		if addr == nil {
			c.Next()
			return
		}

		if addr, err := netip.ParseAddrPort(addr.String()); err == nil && !addr.Addr().IsLoopback() {
			c.Next()
			return
		}

		host, _, err := net.SplitHostPort(c.Request.Host)
		if err != nil {
			host = c.Request.Host
		}

		if addr, err := netip.ParseAddr(host); err == nil {
			if addr.IsLoopback() || addr.IsPrivate() || addr.IsUnspecified() || isLocalIP(addr) {
				c.Next()
				return
			}
		}

		if allowedHost(host) {
			if c.Request.Method == http.MethodOptions {
				c.AbortWithStatus(http.StatusNoContent)
				return
			}

			c.Next()
			return
		}

		c.AbortWithStatus(http.StatusForbidden)
	}
}
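
// GenerateRoutes configures CORS and registers the native API routes along with the
// OpenAI-compatible /v1 endpoints.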
func (s *Server) GenerateRoutes() http.Handler {
	config := cors.DefaultConfig()
	config.AllowWildcard = true
	config.AllowBrowserExtensions = true
	config.AllowHeaders = []string{"Authorization", "Content-Type", "User-Agent", "Accept", "X-Requested-With"}
	openAIProperties := []string{"lang", "package-version", "os", "arch", "retry-count", "runtime", "runtime-version", "async"}
	for _, prop := range openAIProperties {
		config.AllowHeaders = append(config.AllowHeaders, "x-stainless-"+prop)
	}
	config.AllowOrigins = envconfig.Origins()

	r := gin.Default()
	r.Use(
		cors.New(config),
		allowedHostsMiddleware(s.addr),
	)

	r.POST("/api/pull", s.PullHandler)
	r.POST("/api/generate", s.GenerateHandler)
	r.POST("/api/chat", s.ChatHandler)
	r.POST("/api/embed", s.EmbedHandler)
	r.POST("/api/embeddings", s.EmbeddingsHandler)
	r.Any("/api/tokenize", gin.WrapF(s.TokenizeHandler))
	r.Any("/api/detokenize", gin.WrapF(s.DetokenizeHandler))
	r.POST("/api/create", s.CreateHandler)
	r.POST("/api/push", s.PushHandler)
	r.POST("/api/copy", s.CopyHandler)
	r.DELETE("/api/delete", s.DeleteHandler)
	r.POST("/api/show", s.ShowHandler)
	r.POST("/api/blobs/:digest", s.CreateBlobHandler)
	r.HEAD("/api/blobs/:digest", s.HeadBlobHandler)
	r.GET("/api/ps", s.PsHandler)

	// Compatibility endpoints
	r.POST("/v1/chat/completions", openai.ChatMiddleware(), s.ChatHandler)
	r.POST("/v1/completions", openai.CompletionsMiddleware(), s.GenerateHandler)
	r.POST("/v1/embeddings", openai.EmbeddingsMiddleware(), s.EmbedHandler)
	r.GET("/v1/models", openai.ListMiddleware(), s.ListHandler)
	r.GET("/v1/models/:model", openai.RetrieveMiddleware(), s.ShowHandler)

	for _, method := range []string{http.MethodGet, http.MethodHead} {
		r.Handle(method, "/", func(c *gin.Context) {
			c.String(http.StatusOK, "Ollama is running")
		})
		r.Handle(method, "/api/tags", s.ListHandler)
		r.Handle(method, "/api/version", func(c *gin.Context) {
			c.JSON(http.StatusOK, gin.H{"version": version.Version})
		})
	}

	return r
}
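
// Serve sets up logging, repairs and prunes the local blob and manifest store, starts
// the scheduler, and serves the HTTP API on ln until the process is interrupted.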
func Serve(ln net.Listener) error {
	level := slog.LevelInfo
	if envconfig.Debug() {
		level = slog.LevelDebug
	}

	slog.Info("server config", "env", envconfig.Values())
	handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
		Level:     level,
		AddSource: true,
		ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
			if attr.Key == slog.SourceKey {
				source := attr.Value.Any().(*slog.Source)
				source.File = filepath.Base(source.File)
			}
			return attr
		},
	})

	slog.SetDefault(slog.New(handler))

	blobsDir, err := GetBlobsPath("")
	if err != nil {
		return err
	}
	if err := fixBlobs(blobsDir); err != nil {
		return err
	}

	if !envconfig.NoPrune() {
		if _, err := Manifests(false); err != nil {
			slog.Warn("corrupt manifests detected, skipping prune operation. Re-pull or delete to clear", "error", err)
		} else {
			// clean up unused layers and manifests
			if err := PruneLayers(); err != nil {
				return err
			}

			manifestsPath, err := GetManifestPath()
			if err != nil {
				return err
			}

			if err := PruneDirectory(manifestsPath); err != nil {
				return err
			}
		}
	}

	ctx, done := context.WithCancel(context.Background())
	schedCtx, schedDone := context.WithCancel(ctx)
	sched := InitScheduler(schedCtx)
	s := &Server{addr: ln.Addr(), sched: sched}

	http.Handle("/", s.GenerateRoutes())

	slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
	srvr := &http.Server{
		// Use http.DefaultServeMux so we get net/http/pprof for
		// free.
		//
		// TODO(bmizerany): Decide if we want to make this
		// configurable so it is not exposed by default, or allow
		// users to bind it to a different port. This was a quick
		// and easy way to get pprof, but it may not be the best
		// way.
		Handler: nil,
	}

	// listen for a ctrl+c and stop any loaded llm
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
	go func() {
		<-signals
		srvr.Close()
		schedDone()
		sched.unloadAllRunners()
		done()
	}()

	// Locate and log what runners are present at startup
	var runnerNames []string
	for v := range runners.GetAvailableServers() {
		runnerNames = append(runnerNames, v)
	}
	slog.Info("Dynamic LLM libraries", "runners", runnerNames)
	slog.Debug("Override detection logic by setting OLLAMA_LLM_LIBRARY")

	s.sched.Run(schedCtx)

	// At startup we retrieve GPU information so we can get log messages before loading a model
	// This will log warnings to the log in case we have problems with detected GPUs
	gpus := discover.GetGPUInfo()
	gpus.LogDetails()

	err = srvr.Serve(ln)
	// If server is closed from the signal handler, wait for the ctx to be done
	// otherwise error out quickly
	if !errors.Is(err, http.ErrServerClosed) {
		return err
	}
	<-ctx.Done()
	return nil
}
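
// waitForStream consumes ch and writes a single JSON response: the final success
// progress message, or the first error encountered.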
func waitForStream(c *gin.Context, ch chan interface{}) {
	c.Header("Content-Type", "application/json")
	for resp := range ch {
		switch r := resp.(type) {
		case api.ProgressResponse:
			if r.Status == "success" {
				c.JSON(http.StatusOK, r)
				return
			}
		case gin.H:
			status, ok := r["status"].(int)
			if !ok {
				status = http.StatusInternalServerError
			}
			if errorMsg, ok := r["error"].(string); ok {
				c.JSON(status, gin.H{"error": errorMsg})
				return
			} else {
				c.JSON(status, gin.H{"error": "unexpected error format in progress response"})
				return
			}
		default:
			c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
			return
		}
	}
	c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
}
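
// streamResponse writes each value received on ch to the client as newline-delimited
// JSON until the channel is closed.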
func streamResponse(c *gin.Context, ch chan any) {
	c.Header("Content-Type", "application/x-ndjson")
	c.Stream(func(w io.Writer) bool {
		val, ok := <-ch
		if !ok {
			return false
		}

		bts, err := json.Marshal(val)
		if err != nil {
			slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
			return false
		}

		// Delineate chunks with new-line delimiter
		bts = append(bts, '\n')
		if _, err := w.Write(bts); err != nil {
			slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
			return false
		}

		return true
	})
}
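
// PsHandler handles GET /api/ps: it lists currently loaded models with their memory
// usage and expiration times.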
func (s *Server) PsHandler(c *gin.Context) {
	models := []api.ProcessModelResponse{}

	for _, v := range s.sched.loaded {
		model := v.model
		modelDetails := api.ModelDetails{
			Format:            model.Config.ModelFormat,
			Family:            model.Config.ModelFamily,
			Families:          model.Config.ModelFamilies,
			ParameterSize:     model.Config.ModelType,
			QuantizationLevel: model.Config.FileType,
		}

		mr := api.ProcessModelResponse{
			Model:     model.ShortName,
			Name:      model.ShortName,
			Size:      int64(v.estimatedTotal),
			SizeVRAM:  int64(v.estimatedVRAM),
			Digest:    model.Digest,
			Details:   modelDetails,
			ExpiresAt: v.expiresAt,
		}
		// The scheduler waits to set expiresAt, so if a model is loading it's
		// possible that it will be set to the unix epoch. For those cases, just
		// calculate the time w/ the sessionDuration instead.
		var epoch time.Time
		if v.expiresAt == epoch {
			mr.ExpiresAt = time.Now().Add(v.sessionDuration)
		}

		models = append(models, mr)
	}

	slices.SortStableFunc(models, func(i, j api.ProcessModelResponse) int {
		// longest duration remaining listed first
		return cmp.Compare(j.ExpiresAt.Unix(), i.ExpiresAt.Unix())
	})

	c.JSON(http.StatusOK, api.ProcessResponse{Models: models})
}
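
// ChatHandler handles POST /api/chat: it builds the chat prompt from the model's and
// the request's messages, runs the completion, and parses tool calls when tools are
// supplied.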
func (s *Server) ChatHandler(c *gin.Context) {
	checkpointStart := time.Now()

	var req api.ChatRequest
	if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
		return
	} else if err != nil {
		c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// expire the runner
	if len(req.Messages) == 0 && req.KeepAlive != nil && int(req.KeepAlive.Seconds()) == 0 {
		model, err := GetModel(req.Model)
		if err != nil {
			switch {
			case os.IsNotExist(err):
				c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
			case err.Error() == "invalid model name":
				c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			}
			return
		}
		s.sched.expireRunner(model)

		c.JSON(http.StatusOK, api.ChatResponse{
			Model:      req.Model,
			CreatedAt:  time.Now().UTC(),
			Message:    api.Message{Role: "assistant"},
			Done:       true,
			DoneReason: "unload",
		})
		return
	}

	caps := []Capability{CapabilityCompletion}
	if len(req.Tools) > 0 {
		caps = append(caps, CapabilityTools)
	}

	name := model.ParseName(req.Model)
	if !name.IsValid() {
		c.JSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	name, err := getExistingName(name)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "model is required"})
		return
	}

	r, m, opts, err := s.scheduleRunner(c.Request.Context(), name.String(), caps, req.Options, req.KeepAlive)
	if errors.Is(err, errCapabilityCompletion) {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support chat", req.Model)})
		return
	} else if err != nil {
		handleScheduleError(c, req.Model, err)
		return
	}

	checkpointLoaded := time.Now()

	if len(req.Messages) == 0 {
		c.JSON(http.StatusOK, api.ChatResponse{
			Model:      req.Model,
			CreatedAt:  time.Now().UTC(),
			Message:    api.Message{Role: "assistant"},
			Done:       true,
			DoneReason: "load",
		})
		return
	}

	msgs := append(m.Messages, req.Messages...)
	if req.Messages[0].Role != "system" && m.System != "" {
		msgs = append([]api.Message{{Role: "system", Content: m.System}}, msgs...)
	}

	prompt, images, err := chatPrompt(c.Request.Context(), m, r.Tokenize, opts, msgs, req.Tools)
	if err != nil {
		slog.Error("chat prompt error", "error", err)
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	slog.Debug("chat request", "images", len(images), "prompt", prompt)

	ch := make(chan any)
	go func() {
		defer close(ch)
		var sb strings.Builder
		var toolCallIndex int = 0
		if err := r.Completion(c.Request.Context(), llm.CompletionRequest{
			Prompt:  prompt,
			Images:  images,
			Format:  req.Format,
			Options: opts,
		}, func(r llm.CompletionResponse) {
			res := api.ChatResponse{
				Model:      req.Model,
				CreatedAt:  time.Now().UTC(),
				Message:    api.Message{Role: "assistant", Content: r.Content},
				Done:       r.Done,
				DoneReason: r.DoneReason,
				Metrics: api.Metrics{
					PromptEvalCount:    r.PromptEvalCount,
					PromptEvalDuration: r.PromptEvalDuration,
					EvalCount:          r.EvalCount,
					EvalDuration:       r.EvalDuration,
				},
			}

			if r.Done {
				res.TotalDuration = time.Since(checkpointStart)
				res.LoadDuration = checkpointLoaded.Sub(checkpointStart)
			}

			// TODO: tool call checking and filtering should be moved outside of this callback once streaming
			// however this was a simple change for now without reworking streaming logic of this (and other)
			// handlers
			if req.Stream != nil && !*req.Stream || len(req.Tools) == 0 {
				ch <- res
				return
			}

			// Streaming tool calls:
			// If tools are recognized, use a flag to track the sending of a tool downstream
			// This ensures that content is cleared from the message on the last chunk sent
			sb.WriteString(r.Content)
			if toolCalls, ok := m.parseToolCalls(sb.String()); ok {
				res.Message.ToolCalls = toolCalls
				for i := range toolCalls {
					toolCalls[i].Function.Index = toolCallIndex
					toolCallIndex++
				}
				res.Message.Content = ""
				sb.Reset()
				ch <- res
				return
			}

			if r.Done {
				// Send any remaining content if no tool calls were detected
				if toolCallIndex == 0 {
					res.Message.Content = sb.String()
				}
				ch <- res
			}
		}); err != nil {
			ch <- gin.H{"error": err.Error()}
		}
	}()

	if req.Stream != nil && !*req.Stream {
		var resp api.ChatResponse
		var sb strings.Builder
		for rr := range ch {
			switch t := rr.(type) {
			case api.ChatResponse:
				sb.WriteString(t.Message.Content)
				resp = t
			case gin.H:
				msg, ok := t["error"].(string)
				if !ok {
					msg = "unexpected error format in response"
				}

				c.JSON(http.StatusInternalServerError, gin.H{"error": msg})
				return
			default:
				c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected response"})
				return
			}
		}

		resp.Message.Content = sb.String()

		if len(req.Tools) > 0 {
			if toolCalls, ok := m.parseToolCalls(sb.String()); ok {
				resp.Message.ToolCalls = toolCalls
				resp.Message.Content = ""
			}
		}

		c.JSON(http.StatusOK, resp)
		return
	}

	streamResponse(c, ch)
}
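
// handleScheduleError maps scheduler errors to appropriate HTTP status codes and
// JSON error responses.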
func handleScheduleError(c *gin.Context, name string, err error) {
	switch {
	case errors.Is(err, errCapabilities), errors.Is(err, errRequired):
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
	case errors.Is(err, context.Canceled):
		c.JSON(499, gin.H{"error": "request canceled"})
	case errors.Is(err, ErrMaxQueue):
		c.JSON(http.StatusServiceUnavailable, gin.H{"error": err.Error()})
	case errors.Is(err, os.ErrNotExist):
		c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model %q not found, try pulling it first", name)})
	default:
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
	}
}