process_text_test.go

package model

import (
	"bufio"
	"encoding/json"
	"math"
	"os"
	"path/filepath"
	"slices"
	"strconv"
	"strings"
	"testing"

	"github.com/google/go-cmp/cmp"
)
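// llama loads the Llama 3.2 byte-level BPE tokenizer from testdata:
// encoder.json supplies the vocabulary, vocab.bpe the merge rules, and the
// <|begin_of_text|>/<|end_of_text|> specials are appended if missing.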
func llama(t testing.TB) BytePairEncoding {
	t.Helper()

	f, err := os.Open(filepath.Join("testdata", "llama3.2", "encoder.json"))
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()

	vocab := make(map[string]int32)
	if err := json.NewDecoder(f).Decode(&vocab); err != nil {
		t.Fatal(err)
	}

	types := make([]uint32, len(vocab))
	tokens := make([]string, len(vocab))
	for token, id := range vocab {
		tokens[id] = token
		types[id] = 1
	}

	for _, token := range []string{"<|begin_of_text|>", "<|end_of_text|>"} {
		if _, ok := vocab[token]; !ok {
			tokens = append(tokens, token) //nolint:makezero
			types = append(types, 3)       //nolint:makezero
			vocab[token] = int32(len(vocab))
		}
	}

	f, err = os.Open(filepath.Join("testdata", "llama3.2", "vocab.bpe"))
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()

	merges := make([]string, 0, 50000)
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		if !strings.HasPrefix(scanner.Text(), "#") {
			merges = append(merges, scanner.Text())
		}
	}
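
	// The pre-tokenizer pattern below splits text before BPE merges apply:
	// case-insensitive English contractions ('s, 't, 're, ...), letter runs
	// with one optional leading non-letter byte, digit runs of at most three
	// characters, punctuation runs, and several whitespace alternatives.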
	return NewBytePairEncoding(
		`(?i:'s|'t|'re|'ve|'m|'ll|'d)|[^\r\n\p{L}\p{N}]?\p{L}+|\p{N}{1,3}| ?[^\s\p{L}\p{N}]+[\r\n]*|\s*[\r\n]+|\s+(?!\S)|\s+`,
		&Vocabulary{
			Values: tokens,
			Types:  types,
			Merges: merges,
		},
	)
}
func TestLlama(t *testing.T) {
	tokenizer := llama(t)

	t.Run("simple", func(t *testing.T) {
		t.Parallel()

		ids, err := tokenizer.Encode("hello world", true)
		if err != nil {
			t.Error(err)
		}

		if diff := cmp.Diff([]int32{15339, 1917}, ids); diff != "" {
			t.Errorf("no match (-theirs +ours):\n%s", diff)
		}

		s, err := tokenizer.Decode([]int32{15339, 1917})
		if err != nil {
			t.Fatal(err)
		}

		if s != "hello world" {
			t.Errorf("got %q, want hello world", s)
		}

		ids, err = tokenizer.Encode("hello <|end_of_text|>", true)
		if err != nil {
			t.Error(err)
		}

		if diff := cmp.Diff([]int32{15339, 220, 128001}, ids); diff != "" {
			t.Errorf("no match (-theirs +ours):\n%s", diff)
		}
	})

	t.Run("simple repeated", func(t *testing.T) {
		t.Parallel()
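
		// Runs of "0" chunk into groups of at most three digits (the
		// \p{N}{1,3} branch of the pattern), so "000" is token 931 and any
		// remainder encodes as "0" (15) or "00" (410).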
		cases := map[string][]int32{
			strings.Repeat("0", 1):  {15},
			strings.Repeat("0", 2):  {410},
			strings.Repeat("0", 3):  {931},
			strings.Repeat("0", 4):  {931, 15},
			strings.Repeat("0", 5):  {931, 410},
			strings.Repeat("0", 6):  {931, 931},
			strings.Repeat("0", 7):  {931, 931, 15},
			strings.Repeat("0", 8):  {931, 931, 410},
			strings.Repeat("0", 9):  {931, 931, 931},
			strings.Repeat("0", 10): {931, 931, 931, 15},
			strings.Repeat("0", 11): {931, 931, 931, 410},
			strings.Repeat("0", 12): {931, 931, 931, 931},
			strings.Repeat("0", 13): {931, 931, 931, 931, 15},
			strings.Repeat("0", 14): {931, 931, 931, 931, 410},
			strings.Repeat("0", 15): {931, 931, 931, 931, 931},
			strings.Repeat("0", 16): {931, 931, 931, 931, 931, 15},
			strings.Repeat("0", 17): {931, 931, 931, 931, 931, 410},
		}

		for s, want := range cases {
			ids, err := tokenizer.Encode(s, true)
			if err != nil {
				t.Error(err)
			}

			if diff := cmp.Diff(want, ids); diff != "" {
				t.Errorf("%q no match (-theirs +ours):\n%s", s, diff)
			}
		}
	})
  116. t.Run("basic roundtrip", func(t *testing.T) {
  117. t.Parallel()
  118. cases := []string{
  119. "hello",
  120. "hello ",
  121. "hello ",
  122. " hello",
  123. " hello ",
  124. " hello ",
  125. "hello world",
  126. "请考试我的软件!12345",
  127. }
  128. for _, want := range cases {
  129. ids, err := tokenizer.Encode(want, true)
  130. if err != nil {
  131. t.Error(err)
  132. }
  133. if got, err := tokenizer.Decode(ids); err != nil {
  134. t.Fatal(err)
  135. } else if got != want {
  136. t.Errorf("got %q, want %q", got, want)
  137. }
  138. }
  139. })
  140. t.Run("special", func(t *testing.T) {
  141. t.Parallel()
  142. cases := map[string][]int32{
  143. "<|begin_of_text|>A B!": {128000, 32, 426, 0},
  144. "<|begin_of_text|>A<|end_of_text|>B!": {128000, 32, 128001, 33, 0},
  145. "<|begin_of_text|>A<|end_of_text|>B<|begin_of_text|>!": {128000, 32, 128001, 33, 128000, 0},
  146. "<|begin_of_text|>A<|end_of_text|>B<|begin_of_text|>!<|end_of_text|>": {128000, 32, 128001, 33, 128000, 0, 128001},
  147. }
  148. for s, want := range cases {
  149. ids, err := tokenizer.Encode(s, true)
  150. if err != nil {
  151. t.Fatal(err)
  152. }
  153. if diff := cmp.Diff(want, ids); diff != "" {
  154. t.Errorf("no match (-theirs +ours):\n%s", diff)
  155. }
  156. }
  157. })
  158. t.Run("split", func(t *testing.T) {
  159. t.Parallel()
  160. cases := map[string][]string{
  161. "Hello World!": {"Hello", " World", "!"},
  162. "I'm don't won't": {"I", "'m", " don", "'t", " won", "'t"},
  163. "In 2024 there are 366 days": {"In", " ", "202", "4", " there", " are", " ", "366", " days"},
  164. "Hello!! ...world": {"Hello", "!!", " ...", "world"},
  165. "Hello World": {"Hello", " ", " World"},
  166. "Hello\nWorld": {"Hello", "\n", "World"},
  167. "Hello, WORLD!! How's it going?": {"Hello", ",", " WORLD", "!!", " How", "'s", " it", " going", "?"},
  168. }
  169. for s, want := range cases {
  170. got := slices.Collect(tokenizer.split(s))
  171. if diff := cmp.Diff(want, got); diff != "" {
  172. t.Errorf("no match (-theirs +ours):\n%s", diff)
  173. }
  174. }
  175. })
  176. }
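
// TestLlamaRoundtripWhitespace is an added sketch, not part of the original
// suite: it re-checks the Encode/Decode round-trip property (already asserted
// in "basic roundtrip" above) on tab, newline, and mixed-space inputs. It
// assumes the byte-level vocabulary round-trips arbitrary UTF-8, as the
// existing cases suggest.
func TestLlamaRoundtripWhitespace(t *testing.T) {
	tokenizer := llama(t)

	for _, want := range []string{"tab\tseparated", "line\nbreak", "mixed   spacing"} {
		ids, err := tokenizer.Encode(want, true)
		if err != nil {
			t.Fatal(err)
		}

		if got, err := tokenizer.Decode(ids); err != nil {
			t.Fatal(err)
		} else if got != want {
			t.Errorf("got %q, want %q", got, want)
		}
	}
}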
// tekken loads the Tekken tokenizer for testing
func tekken(t testing.TB) TextProcessor {
	t.Helper()

	// Load the tokenizer config from mistral-small
	tokenizerConfigPath := filepath.Join("testdata", "mistral-small", "tokenizer_config.json")
	configFile, err := os.Open(tokenizerConfigPath)
	if err != nil {
		t.Fatal(err)
	}
	defer configFile.Close()

	var config struct {
		AddBosToken bool   `json:"add_bos_token"`
		AddEosToken bool   `json:"add_eos_token"`
		BosToken    string `json:"bos_token"`
		EosToken    string `json:"eos_token"`
	}
	if err := json.NewDecoder(configFile).Decode(&config); err != nil {
		t.Fatal(err)
	}

	// Load tokenizer.json, which contains the vocabulary and other settings
	tokenizerJsonPath := filepath.Join("testdata", "mistral-small", "tokenizer.json")
	tokenizerFile, err := os.Open(tokenizerJsonPath)
	if err != nil {
		t.Fatal(err)
	}
	defer tokenizerFile.Close()

	var tokenizerData struct {
		Model struct {
			Type   string           `json:"type"`
			Vocab  map[string]int32 `json:"vocab"`
			Merges []string         `json:"merges"`
		} `json:"model"`
		AddedTokens []struct {
			Id      int32  `json:"id"`
			Content string `json:"content"`
			Special bool   `json:"special"`
		} `json:"added_tokens"`
		PreTokenizer struct {
			Type          string `json:"type"`
			Pretokenizers []struct {
				Type    string `json:"type"`
				Pattern struct {
					String string `json:"String"`
				} `json:"pattern"`
				Behavior string `json:"behavior"`
			} `json:"pretokenizers"`
		} `json:"pre_tokenizer"`
	}
	if err := json.NewDecoder(tokenizerFile).Decode(&tokenizerData); err != nil {
		t.Fatal(err)
	}

	// Extract the pattern from pre_tokenizer if available
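	// (only the first pretokenizer's pattern in the Sequence is used)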
	var pattern string
	if tokenizerData.PreTokenizer.Type == "Sequence" && len(tokenizerData.PreTokenizer.Pretokenizers) > 0 {
		pattern = tokenizerData.PreTokenizer.Pretokenizers[0].Pattern.String
	}

	// Combine the regular vocab and the added tokens
	vocab := tokenizerData.Model.Vocab

	// Add special tokens from added_tokens
	for _, token := range tokenizerData.AddedTokens {
		vocab[token.Content] = token.Id
	}

	// Create the vocabulary arrays
	maxId := int32(-1)
	for _, id := range vocab {
		if id > maxId {
			maxId = id
		}
	}

	vocabSize := int(maxId + 1)
	types := make([]uint32, vocabSize)
	tokens := make([]string, vocabSize)
	scores := make([]float32, vocabSize)

	for token, id := range vocab {
		tokens[id] = token
		types[id] = TOKEN_TYPE_NORMAL

		// Assign control types to the special tokens
		if token == "<s>" || token == "</s>" || token == "[INST]" || token == "[/INST]" {
			types[id] = TOKEN_TYPE_CONTROL
		}
	}

	// In Tekken, merges don't need to be loaded separately; they're part of the model
	var merges []string

	// Create the vocabulary object
	vocabObj := &Vocabulary{
		Values: tokens,
		Types:  types,
		Scores: scores,
		Merges: merges,
		BOS:    vocab[config.BosToken],
		EOS:    vocab[config.EosToken],
		AddBOS: config.AddBosToken,
		AddEOS: config.AddEosToken,
	}

	// Use the pattern from tokenizer.json if available
	if pattern != "" {
		// Ensure the pattern has proper escaping for Go's regexp
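		// (this assumes the JSON stores character classes unescaped, as "p{L}";
		// a pattern already containing `\p{` would end up double-escaped)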
		pattern = strings.ReplaceAll(pattern, "p{", "\\p{")
		return NewBytePairEncoding(pattern, vocabObj)
	}

	// Fallback pattern if none was found
	return NewBytePairEncoding(
		`\p{L}+|\p{N}+|[^\s\p{L}\p{N}]+|\s+`,
		vocabObj,
	)
}
func TestTekken(t *testing.T) {
	// Skip if the test data isn't available
	if _, err := os.Stat(filepath.Join("testdata", "mistral-small")); os.IsNotExist(err) {
		t.Skip("Mistral-small test data not available")
	}

	tokenizer := tekken(t)

	t.Run("whitespace_handling", func(t *testing.T) {
		t.Parallel()

		// The key difference from SentencePiece is that Tekken doesn't prepend whitespace
		cases := []struct {
			input    string
			expected string
		}{
			{" hello", " hello"},
			{"hello ", "hello "},
			{"hello world", "hello world"},
			{" hello world ", " hello world "},
		}

		for _, tc := range cases {
			ids, err := tokenizer.Encode(tc.input, false)
			if err != nil {
				t.Errorf("Failed to encode %q: %v", tc.input, err)
				continue
			}

			decoded, err := tokenizer.Decode(ids)
			if err != nil {
				t.Errorf("Failed to decode tokens for %q: %v", tc.input, err)
				continue
			}

			if decoded != tc.expected {
				t.Errorf("Whitespace handling: got %q, want %q", decoded, tc.expected)
			}
		}
	})

	t.Run("chat_templates", func(t *testing.T) {
		t.Parallel()

		// Test the Tekken chat template format, which has no spaces after special tokens
		templates := []struct {
			input       string
			expectSpace bool // whether we expect a space after special tokens
		}{
			{"<s>[INST]user message[/INST]", false},
			{"<s>[INST] user message[/INST]", true},
			{"<s>[INST]user message [/INST]", true},
		}

		for _, tc := range templates {
			ids, err := tokenizer.Encode(tc.input, false)
			if err != nil {
				t.Errorf("Failed to encode %q: %v", tc.input, err)
				continue
			}

			decoded, err := tokenizer.Decode(ids)
			if err != nil {
				t.Errorf("Failed to decode tokens for %q: %v", tc.input, err)
				continue
			}

			// Check whether there's a space after special tokens
			hasSpaceAfterINST := strings.Contains(decoded, "[INST] ")
			if hasSpaceAfterINST != tc.expectSpace {
				t.Errorf("Chat template space handling: got space=%v, want space=%v for %q",
					hasSpaceAfterINST, tc.expectSpace, tc.input)
			}
		}
	})

	t.Run("special_tokens", func(t *testing.T) {
		t.Parallel()

		// Test how Tekken handles special tokens
		cases := []struct {
			input    string
			expected []string // tokens that must appear in the decoded output
		}{
			{"<s>[INST]hello[/INST]", []string{"<s>", "[INST]", "hello", "[/INST]"}},
			{"[INST]hello[/INST]</s>", []string{"[INST]", "hello", "[/INST]", "</s>"}},
			{"<s>[INST]hello[/INST]</s>[INST]again[/INST]", []string{"<s>", "[INST]", "hello", "[/INST]", "</s>", "[INST]", "again", "[/INST]"}},
		}

		for _, tc := range cases {
			ids, err := tokenizer.Encode(tc.input, false)
			if err != nil {
				t.Errorf("Failed to encode %q: %v", tc.input, err)
				continue
			}

			decoded, err := tokenizer.Decode(ids)
			if err != nil {
				t.Errorf("Failed to decode tokens for %q: %v", tc.input, err)
				continue
			}

			for _, expected := range tc.expected {
				if !strings.Contains(decoded, expected) {
					t.Errorf("Special token handling: %q missing in decoded output %q", expected, decoded)
				}
			}
		}
	})

	t.Run("vocabulary_coverage", func(t *testing.T) {
		t.Parallel()

		// Tekken has a larger vocabulary, so test coverage of various token types
		samples := []string{
			"Hello world!",
			"This is a test of the Tekken tokenizer.",
			"It has a considerably larger vocabulary size.",
			"Special characters: !@#$%^&*()",
			"Numbers: 1234567890",
			"Multiple languages: こんにちは 你好 안녕하세요",
			"Code snippets: def function(): return True",
		}

		for _, sample := range samples {
			ids, err := tokenizer.Encode(sample, false)
			if err != nil {
				t.Errorf("Failed to encode %q: %v", sample, err)
				continue
			}

			decoded, err := tokenizer.Decode(ids)
			if err != nil {
				t.Errorf("Failed to decode tokens for %q: %v", sample, err)
				continue
			}

			if decoded != sample {
				t.Errorf("Vocabulary coverage: got %q, want %q", decoded, sample)
			}
		}
	})

	t.Run("splitting_behavior", func(t *testing.T) {
		t.Parallel()

		// Test the splitting behavior, which may differ from SentencePiece
		cases := map[string][]string{
			"Hello World!": {"Hello", " World", "!"},
			"user message": {"user", " message"},
			"[INST]hello":  {"[INST]", "hello"},
			"hello[/INST]": {"hello", "[/INST]"},
		}

		for s, want := range cases {
			got := slices.Collect(tokenizer.(*BytePairEncoding).split(s))
			if diff := cmp.Diff(want, got); diff != "" {
				t.Errorf("Splitting behavior no match (-want +got):\n%s", diff)
			}
		}
	})

	t.Run("full_chat_sequence", func(t *testing.T) {
		t.Parallel()

		// Test a complete chat sequence in Tekken's format
		chatSequence := "<s>[INST]user message[/INST]assistant message</s>[INST]new user message[/INST]"

		ids, err := tokenizer.Encode(chatSequence, false)
		if err != nil {
			t.Fatalf("Failed to encode chat sequence: %v", err)
		}

		decoded, err := tokenizer.Decode(ids)
		if err != nil {
			t.Fatalf("Failed to decode chat sequence tokens: %v", err)
		}

		// In Tekken, no whitespace should be added after special tokens
		if strings.Contains(decoded, "[INST] ") {
			t.Errorf("Tekken chat sequence has unexpected space after [INST]: %q", decoded)
		}

		if strings.Contains(decoded, "[/INST] ") {
			t.Errorf("Tekken chat sequence has unexpected space after [/INST]: %q", decoded)
		}
	})
}
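
// BenchmarkBytePairEncoding measures Encode, Decode, and split throughput on
// prefixes of war-and-peace.txt that grow by powers of ten, capped at the
// file's length.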
func BenchmarkBytePairEncoding(b *testing.B) {
	tokenizer := llama(b)
	bts, err := os.ReadFile(filepath.Join("testdata", "war-and-peace.txt"))
	if err != nil {
		b.Fatal(err)
	}

	for i := range 8 {
		n := min(int(math.Pow10(i)), len(bts))
		bts := bts[:n]

		b.Run("encode"+strconv.Itoa(n), func(b *testing.B) {
			b.ResetTimer()
			for range b.N {
				_, err := tokenizer.Encode(string(bts), true)
				if err != nil {
					b.Fatal(err)
				}
			}
		})

		b.Run("decode"+strconv.Itoa(n), func(b *testing.B) {
			ids, err := tokenizer.Encode(string(bts), true)
			if err != nil {
				b.Fatal(err)
			}

			b.ResetTimer()
			for range b.N {
				_, err := tokenizer.Decode(ids)
				if err != nil {
					b.Fatal(err)
				}
			}
		})

		b.Run("split"+strconv.Itoa(n), func(b *testing.B) {
			b.ResetTimer()
			for range b.N {
				slices.Collect(tokenizer.split(string(bts)))
			}
		})
	}
}