// convert_bert.go (4.3 KB)
  1. package convert
  2. import (
  3. "cmp"
  4. "encoding/json"
  5. "io/fs"
  6. "path/filepath"
  7. "slices"
  8. "strings"
  9. "github.com/ollama/ollama/llm"
  10. )
// bert holds the model hyperparameters parsed from a BERT-style Hugging Face
// config.json. Upstream checkpoints use several alternative names for the
// same hyperparameter (e.g. n_layers vs num_hidden_layers vs n_layer), so
// every variant is captured here and reconciled in KV via cmp.Or.
type bert struct {
	Parameters
	NLayers               uint32  `json:"n_layers"`
	NumHiddenLayers       uint32  `json:"num_hidden_layers"`
	NLayer                uint32  `json:"n_layer"`
	MaxPositionEmbeddings uint32  `json:"max_position_embeddings"`
	NCtx                  uint32  `json:"n_ctx"`
	HiddenSize            uint32  `json:"hidden_size"`
	NEmbd                 uint32  `json:"n_embd"`
	IntermediateSize      uint32  `json:"intermediate_size"`
	NInner                uint32  `json:"n_inner"`
	NumAttentionHeads     uint32  `json:"num_attention_heads"`
	NHead                 uint32  `json:"n_head"`
	NumKeyValueHeads      uint32  `json:"num_key_value_heads"`
	LayerNormEPS          float32 `json:"layer_norm_eps"`
	LayerNormEpsilon      float32 `json:"layer_norm_epsilon"`
	NormEpsilon           float32 `json:"norm_epsilon"`

	// PoolingType has no json tag: it is not read from config.json but set by
	// parseMore from the sentence-transformers pooling config (1 = mean
	// tokens, 2 = CLS token; stays 0 when no pooling module is found).
	PoolingType uint32
}
// Compile-time assertions that *bert satisfies the Converter and moreParser
// interfaces.
var (
	_ Converter  = (*bert)(nil)
	_ moreParser = (*bert)(nil)
)
  34. func (p *bert) parseMore(fsys fs.FS) error {
  35. bts, err := fs.ReadFile(fsys, "modules.json")
  36. if err != nil {
  37. return err
  38. }
  39. var modules []struct {
  40. Type string `json:"type"`
  41. Path string `json:"path"`
  42. }
  43. if err := json.Unmarshal(bts, &modules); err != nil {
  44. return err
  45. }
  46. var pooling string
  47. for _, m := range modules {
  48. if m.Type == "sentence_transformers.models.Pooling" {
  49. pooling = m.Path
  50. break
  51. }
  52. }
  53. if pooling != "" {
  54. bts, err := fs.ReadFile(fsys, filepath.Join(pooling, "config.json"))
  55. if err != nil {
  56. return err
  57. }
  58. var pc struct {
  59. PoolingModeCLSToken bool `json:"pooling_mode_cls_token"`
  60. PoolingModeMeanTokens bool `json:"pooling_mode_mean_tokens"`
  61. }
  62. if err := json.Unmarshal(bts, &pc); err != nil {
  63. return err
  64. }
  65. if pc.PoolingModeMeanTokens {
  66. p.PoolingType = 1
  67. } else if pc.PoolingModeCLSToken {
  68. p.PoolingType = 2
  69. }
  70. }
  71. return nil
  72. }
  73. func (p *bert) KV(t *Tokenizer) llm.KV {
  74. kv := p.Parameters.KV(t)
  75. kv["general.architecture"] = "bert"
  76. kv["general.name"] = "bert"
  77. kv["bert.attention.causal"] = false
  78. kv["bert.pooling_type"] = p.PoolingType
  79. kv["bert.block_count"] = cmp.Or(p.NLayers, p.NumHiddenLayers, p.NLayer)
  80. if contextLength := cmp.Or(p.MaxPositionEmbeddings, p.NCtx); contextLength > 0 {
  81. kv["bert.context_length"] = contextLength
  82. }
  83. if embeddingLength := cmp.Or(p.HiddenSize, p.NEmbd); embeddingLength > 0 {
  84. kv["bert.embedding_length"] = cmp.Or(p.HiddenSize, p.NEmbd)
  85. }
  86. if feedForwardLength := cmp.Or(p.IntermediateSize, p.NInner); feedForwardLength > 0 {
  87. kv["bert.feed_forward_length"] = cmp.Or(p.IntermediateSize, p.NInner)
  88. }
  89. if headCount := cmp.Or(p.NumAttentionHeads, p.NHead); headCount > 0 {
  90. kv["bert.attention.head_count"] = cmp.Or(p.NumAttentionHeads, p.NHead)
  91. }
  92. if layerNormEpsilon := cmp.Or(p.LayerNormEPS, p.LayerNormEpsilon, p.NormEpsilon); layerNormEpsilon > 0 {
  93. kv["bert.attention.layer_norm_epsilon"] = layerNormEpsilon
  94. }
  95. kv["tokenizer.ggml.model"] = "bert"
  96. kv["tokenizer.ggml.token_type_count"] = uint32(2)
  97. // convert to phantom space tokens
  98. for i, e := range t.Tokens {
  99. if strings.HasPrefix(e, "[") && strings.HasSuffix(e, "]") {
  100. // noop
  101. } else if strings.HasPrefix(e, "##") {
  102. t.Tokens[i] = e[2:]
  103. } else {
  104. t.Tokens[i] = "\u2581" + e
  105. }
  106. }
  107. kv["tokenizer.ggml.tokens"] = t.Tokens
  108. return kv
  109. }
  110. func (p *bert) Tensors(ts []Tensor) []llm.Tensor {
  111. var out []llm.Tensor
  112. for _, t := range ts {
  113. if slices.Contains([]string{
  114. "embeddings.position_ids",
  115. "pooler.dense.weight",
  116. "pooler.dense.bias",
  117. }, t.Name()) {
  118. continue
  119. }
  120. out = append(out, llm.Tensor{
  121. Name: t.Name(),
  122. Kind: t.Kind(),
  123. Shape: t.Shape(),
  124. WriterTo: t,
  125. })
  126. }
  127. return out
  128. }
  129. func (bert) Replacements() []string {
  130. return []string{
  131. "encoder.layer", "blk",
  132. "encoder.layers", "blk",
  133. "embeddings.word_embeddings", "token_embd",
  134. "embeddings.token_type_embeddings", "token_types",
  135. "embeddings.LayerNorm", "token_embd_norm",
  136. "embeddings.position_embeddings", "position_embd",
  137. "attention.self.query", "attn_q",
  138. "attention.self.key", "attn_k",
  139. "attention.self.value", "attn_v",
  140. "attention.output.dense", "attn_output",
  141. "attention.output.LayerNorm", "attn_output_norm",
  142. "intermediate.dense", "ffn_up",
  143. "output.dense", "ffn_down",
  144. "output.LayerNorm", "layer_output_norm",
  145. }
  146. }