llm.go

package llm

import (
	"fmt"
	"os"

	"github.com/jmorganca/ollama/api"
)

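// LLM is the interface implemented by each model backend: it generates
// predictions, produces embeddings, and encodes/decodes tokens.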
type LLM interface {
	Predict([]int, string, func(api.GenerateResponse)) error
	Embedding(string) ([]float64, error)
	Encode(string) []int
	Decode(...int) string
	SetOptions(api.Options)
	Close()
}

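// New opens the model file at the given path, decodes its GGML header to
// determine the model family, and returns an LLM for that family.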
func New(model string, opts api.Options) (LLM, error) {
	if _, err := os.Stat(model); err != nil {
		return nil, err
	}

	f, err := os.Open(model)
	if err != nil {
		return nil, err
	}
	// the file is only needed to decode the GGML header, so close it when done
	defer f.Close()

	ggml, err := DecodeGGML(f, ModelFamilyLlama)
	if err != nil {
		return nil, err
	}

	switch ggml.ModelFamily {
	case ModelFamilyLlama:
		return newLlama(model, opts)
	default:
		return nil, fmt.Errorf("unknown ggml type: %s", ggml.ModelFamily)
	}
}
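
For reference, a minimal caller sketch. This is an assumption-laden example, not part of the file above: the model path is a placeholder, and it assumes api.DefaultOptions() and the GenerateResponse.Response field exist in the api package.

package main

import (
	"fmt"
	"log"

	"github.com/jmorganca/ollama/api"
	"github.com/jmorganca/ollama/llm"
)

func main() {
	// "model.bin" is a placeholder path to a local GGML model file.
	// api.DefaultOptions() is assumed to provide reasonable defaults.
	model, err := llm.New("model.bin", api.DefaultOptions())
	if err != nil {
		log.Fatal(err)
	}
	defer model.Close()

	// Stream generated text as it arrives; nil means no prior context tokens.
	err = model.Predict(nil, "Why is the sky blue?", func(r api.GenerateResponse) {
		fmt.Print(r.Response)
	})
	if err != nil {
		log.Fatal(err)
	}
}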