routes.go

package server

import (
	"embed"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"math"
	"net"
	"net/http"
	"path"
	"runtime"
	"strings"
	"text/template"

	"github.com/gin-gonic/gin"
	"github.com/lithammer/fuzzysearch/fuzzy"

	"github.com/jmorganca/ollama/api"
	"github.com/jmorganca/ollama/llama"
)
//go:embed templates/*
var templatesFS embed.FS

var templates = template.Must(template.ParseFS(templatesFS, "templates/*.prompt"))
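// generate handles POST /api/generate: it binds the request body, loads the
// requested llama model, renders the closest-matching prompt template, and
// streams generated tokens back to the client as newline-delimited JSON.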
func generate(c *gin.Context) {
	// TODO: these should be request parameters
	gpulayers := 1
	tokens := 512
	threads := runtime.NumCPU()

	var req api.GenerateRequest
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
		return
	}

	if remoteModel, _ := getRemote(req.Model); remoteModel != nil {
		req.Model = remoteModel.FullName()
	}

	model, err := llama.New(req.Model, llama.EnableF16Memory, llama.SetContext(128), llama.EnableEmbeddings, llama.SetGPULayers(gpulayers))
	if err != nil {
		fmt.Println("Loading the model failed:", err.Error())
		return
	}
	defer model.Free()

	// Pick the prompt template whose name is the closest fuzzy match to the model name.
	templateNames := make([]string, 0, len(templates.Templates()))
	for _, template := range templates.Templates() {
		templateNames = append(templateNames, template.Name())
	}

	match, _ := matchRankOne(path.Base(req.Model), templateNames)
	if template := templates.Lookup(match); template != nil {
		var sb strings.Builder
		if err := template.Execute(&sb, req); err != nil {
			fmt.Println("Prompt template failed:", err.Error())
			return
		}

		req.Prompt = sb.String()
	}

	// Run prediction in the background, forwarding each token over the channel.
	ch := make(chan string)
	go func() {
		defer close(ch)
		_, err := model.Predict(req.Prompt, llama.Debug, llama.SetTokenCallback(func(token string) bool {
			ch <- token
			return true
		}), llama.SetTokens(tokens), llama.SetThreads(threads), llama.SetTopK(90), llama.SetTopP(0.86), llama.SetStopWords("llama"))
		if err != nil {
			panic(err)
		}
	}()

	// Stream each token to the client as a JSON object terminated by a newline.
	c.Stream(func(w io.Writer) bool {
		token, ok := <-ch
		if !ok {
			return false
		}

		resp := api.GenerateResponse{
			Response: token,
		}

		bts, err := json.Marshal(resp)
		if err != nil {
			return false
		}

		bts = append(bts, '\n')
		if _, err := w.Write(bts); err != nil {
			return false
		}

		return true
	})
}
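// Serve registers the HTTP API routes and serves requests on the provided listener.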
func Serve(ln net.Listener) error {
	r := gin.Default()

	r.POST("/api/pull", func(c *gin.Context) {
		var req api.PullRequest
		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
			return
		}

		// Pull the model in the background and stream progress updates to the
		// client as newline-delimited JSON.
		progressCh := make(chan api.PullProgress)
		go func() {
			defer close(progressCh)
			if err := pull(req.Model, progressCh); err != nil {
				c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
				return
			}
		}()

		c.Stream(func(w io.Writer) bool {
			progress, ok := <-progressCh
			if !ok {
				return false
			}

			bts, err := json.Marshal(progress)
			if err != nil {
				return false
			}

			bts = append(bts, '\n')
			if _, err := w.Write(bts); err != nil {
				return false
			}

			return true
		})
	})

	r.POST("/api/generate", generate)

	log.Printf("Listening on %s", ln.Addr())
	s := &http.Server{
		Handler: r,
	}

	return s.Serve(ln)
}
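// matchRankOne returns the target with the smallest Levenshtein distance to
// source, along with that distance.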
func matchRankOne(source string, targets []string) (bestMatch string, bestRank int) {
	bestRank = math.MaxInt
	for _, target := range targets {
		if rank := fuzzy.LevenshteinDistance(source, target); bestRank > rank {
			bestRank = rank
			bestMatch = target
		}
	}

	return
}
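// Example (a minimal sketch, not part of this file): Serve leaves listener
// setup to the caller, so wiring it up could look roughly like the following;
// the address is an arbitrary placeholder.
//
//	ln, err := net.Listen("tcp", "127.0.0.1:8080")
//	if err != nil {
//		log.Fatal(err)
//	}
//	log.Fatal(server.Serve(ln))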