openai_test.go 18 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787
  1. package openai
  2. import (
  3. "bytes"
  4. "encoding/base64"
  5. "encoding/json"
  6. "io"
  7. "net/http"
  8. "net/http/httptest"
  9. "strings"
  10. "testing"
  11. "time"
  12. "github.com/gin-gonic/gin"
  13. "github.com/google/go-cmp/cmp"
  14. "github.com/ollama/ollama/api"
  15. )
const (
	// prefix is the data-URL header prepended to base64 image payloads in
	// OpenAI-style image_url message content.
	prefix = `data:image/jpeg;base64,`
	// image is a base64-encoded tiny PNG (the payload starts with the PNG
	// signature iVBORw0KGgo) used as the test image body. NOTE(review): the
	// prefix declares image/jpeg while the bytes are PNG — presumably the
	// middleware does not validate the media type; confirm if that matters.
	image = `iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNk+A8AAQUBAScY42YAAAAASUVORK5CYII=`
)

var (
	// False and True are addressable booleans so test cases can take their
	// addresses for *bool fields such as api.ChatRequest.Stream.
	False = false
	True  = true
)
  24. func captureRequestMiddleware(capturedRequest any) gin.HandlerFunc {
  25. return func(c *gin.Context) {
  26. bodyBytes, _ := io.ReadAll(c.Request.Body)
  27. c.Request.Body = io.NopCloser(bytes.NewReader(bodyBytes))
  28. err := json.Unmarshal(bodyBytes, capturedRequest)
  29. if err != nil {
  30. c.AbortWithStatusJSON(http.StatusInternalServerError, "failed to unmarshal request")
  31. }
  32. c.Next()
  33. }
  34. }
  35. func TestChatMiddleware(t *testing.T) {
  36. type testCase struct {
  37. name string
  38. body string
  39. req api.ChatRequest
  40. err ErrorResponse
  41. }
  42. var capturedRequest *api.ChatRequest
  43. testCases := []testCase{
  44. {
  45. name: "chat handler",
  46. body: `{
  47. "model": "test-model",
  48. "messages": [
  49. {"role": "user", "content": "Hello"}
  50. ]
  51. }`,
  52. req: api.ChatRequest{
  53. Model: "test-model",
  54. Messages: []api.Message{
  55. {
  56. Role: "user",
  57. Content: "Hello",
  58. },
  59. },
  60. Options: map[string]any{
  61. "temperature": 1.0,
  62. "top_p": 1.0,
  63. },
  64. Stream: &False,
  65. },
  66. },
  67. {
  68. name: "chat handler with options",
  69. body: `{
  70. "model": "test-model",
  71. "messages": [
  72. {"role": "user", "content": "Hello"}
  73. ],
  74. "stream": true,
  75. "max_completion_tokens": 999,
  76. "seed": 123,
  77. "stop": ["\n", "stop"],
  78. "temperature": 3.0,
  79. "frequency_penalty": 4.0,
  80. "presence_penalty": 5.0,
  81. "top_p": 6.0,
  82. "response_format": {"type": "json_object"}
  83. }`,
  84. req: api.ChatRequest{
  85. Model: "test-model",
  86. Messages: []api.Message{
  87. {
  88. Role: "user",
  89. Content: "Hello",
  90. },
  91. },
  92. Options: map[string]any{
  93. "num_predict": 999.0, // float because JSON doesn't distinguish between float and int
  94. "seed": 123.0,
  95. "stop": []any{"\n", "stop"},
  96. "temperature": 3.0,
  97. "frequency_penalty": 4.0,
  98. "presence_penalty": 5.0,
  99. "top_p": 6.0,
  100. },
  101. Format: json.RawMessage(`"json"`),
  102. Stream: &True,
  103. },
  104. },
  105. {
  106. name: "chat handler with streaming usage",
  107. body: `{
  108. "model": "test-model",
  109. "messages": [
  110. {"role": "user", "content": "Hello"}
  111. ],
  112. "stream": true,
  113. "stream_options": {"include_usage": true},
  114. "max_tokens": 999,
  115. "seed": 123,
  116. "stop": ["\n", "stop"],
  117. "temperature": 3.0,
  118. "frequency_penalty": 4.0,
  119. "presence_penalty": 5.0,
  120. "top_p": 6.0,
  121. "response_format": {"type": "json_object"}
  122. }`,
  123. req: api.ChatRequest{
  124. Model: "test-model",
  125. Messages: []api.Message{
  126. {
  127. Role: "user",
  128. Content: "Hello",
  129. },
  130. },
  131. Options: map[string]any{
  132. "num_predict": 999.0, // float because JSON doesn't distinguish between float and int
  133. "seed": 123.0,
  134. "stop": []any{"\n", "stop"},
  135. "temperature": 3.0,
  136. "frequency_penalty": 4.0,
  137. "presence_penalty": 5.0,
  138. "top_p": 6.0,
  139. },
  140. Format: json.RawMessage(`"json"`),
  141. Stream: &True,
  142. },
  143. },
  144. {
  145. name: "chat handler with image content",
  146. body: `{
  147. "model": "test-model",
  148. "messages": [
  149. {
  150. "role": "user",
  151. "content": [
  152. {
  153. "type": "text",
  154. "text": "Hello"
  155. },
  156. {
  157. "type": "image_url",
  158. "image_url": {
  159. "url": "` + prefix + image + `"
  160. }
  161. }
  162. ]
  163. }
  164. ]
  165. }`,
  166. req: api.ChatRequest{
  167. Model: "test-model",
  168. Messages: []api.Message{
  169. {
  170. Role: "user",
  171. Content: "Hello",
  172. },
  173. {
  174. Role: "user",
  175. Images: []api.ImageData{
  176. func() []byte {
  177. img, _ := base64.StdEncoding.DecodeString(image)
  178. return img
  179. }(),
  180. },
  181. },
  182. },
  183. Options: map[string]any{
  184. "temperature": 1.0,
  185. "top_p": 1.0,
  186. },
  187. Stream: &False,
  188. },
  189. },
  190. {
  191. name: "chat handler with tools",
  192. body: `{
  193. "model": "test-model",
  194. "messages": [
  195. {"role": "user", "content": "What's the weather like in Paris Today?"},
  196. {"role": "assistant", "tool_calls": [{"id": "id", "type": "function", "function": {"name": "get_current_weather", "arguments": "{\"location\": \"Paris, France\", \"format\": \"celsius\"}"}}]}
  197. ]
  198. }`,
  199. req: api.ChatRequest{
  200. Model: "test-model",
  201. Messages: []api.Message{
  202. {
  203. Role: "user",
  204. Content: "What's the weather like in Paris Today?",
  205. },
  206. {
  207. Role: "assistant",
  208. ToolCalls: []api.ToolCall{
  209. {
  210. Function: api.ToolCallFunction{
  211. Name: "get_current_weather",
  212. Arguments: map[string]interface{}{
  213. "location": "Paris, France",
  214. "format": "celsius",
  215. },
  216. },
  217. },
  218. },
  219. },
  220. },
  221. Options: map[string]any{
  222. "temperature": 1.0,
  223. "top_p": 1.0,
  224. },
  225. Stream: &False,
  226. },
  227. },
  228. {
  229. name: "chat handler with streaming tools",
  230. body: `{
  231. "model": "test-model",
  232. "messages": [
  233. {"role": "user", "content": "What's the weather like in Paris?"}
  234. ],
  235. "stream": true,
  236. "tools": [{
  237. "type": "function",
  238. "function": {
  239. "name": "get_weather",
  240. "description": "Get the current weather",
  241. "parameters": {
  242. "type": "object",
  243. "required": ["location"],
  244. "properties": {
  245. "location": {
  246. "type": "string",
  247. "description": "The city and state"
  248. },
  249. "unit": {
  250. "type": "string",
  251. "enum": ["celsius", "fahrenheit"]
  252. }
  253. }
  254. }
  255. }
  256. }]
  257. }`,
  258. req: api.ChatRequest{
  259. Model: "test-model",
  260. Messages: []api.Message{
  261. {
  262. Role: "user",
  263. Content: "What's the weather like in Paris?",
  264. },
  265. },
  266. Tools: []api.Tool{
  267. {
  268. Type: "function",
  269. Function: api.ToolFunction{
  270. Name: "get_weather",
  271. Description: "Get the current weather",
  272. Parameters: struct {
  273. Type string `json:"type"`
  274. Required []string `json:"required"`
  275. Properties map[string]struct {
  276. Type string `json:"type"`
  277. Description string `json:"description"`
  278. Enum []string `json:"enum,omitempty"`
  279. } `json:"properties"`
  280. }{
  281. Type: "object",
  282. Required: []string{"location"},
  283. Properties: map[string]struct {
  284. Type string `json:"type"`
  285. Description string `json:"description"`
  286. Enum []string `json:"enum,omitempty"`
  287. }{
  288. "location": {
  289. Type: "string",
  290. Description: "The city and state",
  291. },
  292. "unit": {
  293. Type: "string",
  294. Enum: []string{"celsius", "fahrenheit"},
  295. },
  296. },
  297. },
  298. },
  299. },
  300. },
  301. Options: map[string]any{
  302. "temperature": 1.0,
  303. "top_p": 1.0,
  304. },
  305. Stream: &True,
  306. },
  307. },
  308. {
  309. name: "chat handler with num_ctx",
  310. body: `{
  311. "model": "test-model",
  312. "messages": [{"role": "user", "content": "Hello"}],
  313. "num_ctx": 4096
  314. }`,
  315. req: api.ChatRequest{
  316. Model: "test-model",
  317. Messages: []api.Message{{Role: "user", Content: "Hello"}},
  318. Options: map[string]any{
  319. "num_ctx": 4096.0, // float because JSON doesn't distinguish between float and int
  320. "temperature": 1.0,
  321. "top_p": 1.0,
  322. },
  323. Stream: &False,
  324. },
  325. },
  326. {
  327. name: "chat handler with max_completion_tokens < num_ctx",
  328. body: `{
  329. "model": "test-model",
  330. "messages": [{"role": "user", "content": "Hello"}],
  331. "max_completion_tokens": 2
  332. }`,
  333. req: api.ChatRequest{
  334. Model: "test-model",
  335. Messages: []api.Message{{Role: "user", Content: "Hello"}},
  336. Options: map[string]any{
  337. "num_predict": 2.0, // float because JSON doesn't distinguish between float and int
  338. "temperature": 1.0,
  339. "top_p": 1.0,
  340. },
  341. Stream: &False,
  342. },
  343. },
  344. {
  345. name: "chat handler with max_completion_tokens > num_ctx",
  346. body: `{
  347. "model": "test-model",
  348. "messages": [{"role": "user", "content": "Hello"}],
  349. "max_completion_tokens": 4096
  350. }`,
  351. req: api.ChatRequest{
  352. Model: "test-model",
  353. Messages: []api.Message{{Role: "user", Content: "Hello"}},
  354. Options: map[string]any{
  355. "num_predict": 4096.0, // float because JSON doesn't distinguish between float and int
  356. "num_ctx": 4096.0,
  357. "temperature": 1.0,
  358. "top_p": 1.0,
  359. },
  360. Stream: &False,
  361. },
  362. },
  363. {
  364. name: "chat handler error forwarding",
  365. body: `{
  366. "model": "test-model",
  367. "messages": [
  368. {"role": "user", "content": 2}
  369. ]
  370. }`,
  371. err: ErrorResponse{
  372. Error: Error{
  373. Message: "invalid message content type: float64",
  374. Type: "invalid_request_error",
  375. },
  376. },
  377. },
  378. }
  379. endpoint := func(c *gin.Context) {
  380. c.Status(http.StatusOK)
  381. }
  382. gin.SetMode(gin.TestMode)
  383. router := gin.New()
  384. router.Use(ChatMiddleware(), captureRequestMiddleware(&capturedRequest))
  385. router.Handle(http.MethodPost, "/api/chat", endpoint)
  386. for _, tc := range testCases {
  387. t.Run(tc.name, func(t *testing.T) {
  388. req, _ := http.NewRequest(http.MethodPost, "/api/chat", strings.NewReader(tc.body))
  389. req.Header.Set("Content-Type", "application/json")
  390. defer func() { capturedRequest = nil }()
  391. resp := httptest.NewRecorder()
  392. router.ServeHTTP(resp, req)
  393. var errResp ErrorResponse
  394. if resp.Code != http.StatusOK {
  395. if err := json.Unmarshal(resp.Body.Bytes(), &errResp); err != nil {
  396. t.Fatal(err)
  397. }
  398. return
  399. }
  400. if diff := cmp.Diff(&tc.req, capturedRequest); diff != "" {
  401. t.Fatalf("requests did not match (-want +got):\n%s", diff)
  402. }
  403. if diff := cmp.Diff(tc.err, errResp); diff != "" {
  404. t.Fatalf("errors did not match for %s:\n%s", tc.name, diff)
  405. }
  406. })
  407. }
  408. }
  409. func TestCompletionsMiddleware(t *testing.T) {
  410. type testCase struct {
  411. name string
  412. body string
  413. req api.GenerateRequest
  414. err ErrorResponse
  415. }
  416. var capturedRequest *api.GenerateRequest
  417. testCases := []testCase{
  418. {
  419. name: "completions handler",
  420. body: `{
  421. "model": "test-model",
  422. "prompt": "Hello",
  423. "temperature": 0.8,
  424. "stop": ["\n", "stop"],
  425. "suffix": "suffix"
  426. }`,
  427. req: api.GenerateRequest{
  428. Model: "test-model",
  429. Prompt: "Hello",
  430. Options: map[string]any{
  431. "frequency_penalty": 0.0,
  432. "presence_penalty": 0.0,
  433. "temperature": 0.8,
  434. "top_p": 1.0,
  435. "stop": []any{"\n", "stop"},
  436. },
  437. Suffix: "suffix",
  438. Stream: &False,
  439. },
  440. },
  441. {
  442. name: "completions handler stream",
  443. body: `{
  444. "model": "test-model",
  445. "prompt": "Hello",
  446. "stream": true,
  447. "temperature": 0.8,
  448. "stop": ["\n", "stop"],
  449. "suffix": "suffix"
  450. }`,
  451. req: api.GenerateRequest{
  452. Model: "test-model",
  453. Prompt: "Hello",
  454. Options: map[string]any{
  455. "frequency_penalty": 0.0,
  456. "presence_penalty": 0.0,
  457. "temperature": 0.8,
  458. "top_p": 1.0,
  459. "stop": []any{"\n", "stop"},
  460. },
  461. Suffix: "suffix",
  462. Stream: &True,
  463. },
  464. },
  465. {
  466. name: "completions handler stream with usage",
  467. body: `{
  468. "model": "test-model",
  469. "prompt": "Hello",
  470. "stream": true,
  471. "stream_options": {"include_usage": true},
  472. "temperature": 0.8,
  473. "stop": ["\n", "stop"],
  474. "suffix": "suffix"
  475. }`,
  476. req: api.GenerateRequest{
  477. Model: "test-model",
  478. Prompt: "Hello",
  479. Options: map[string]any{
  480. "frequency_penalty": 0.0,
  481. "presence_penalty": 0.0,
  482. "temperature": 0.8,
  483. "top_p": 1.0,
  484. "stop": []any{"\n", "stop"},
  485. },
  486. Suffix: "suffix",
  487. Stream: &True,
  488. },
  489. },
  490. {
  491. name: "completions handler error forwarding",
  492. body: `{
  493. "model": "test-model",
  494. "prompt": "Hello",
  495. "temperature": null,
  496. "stop": [1, 2],
  497. "suffix": "suffix"
  498. }`,
  499. err: ErrorResponse{
  500. Error: Error{
  501. Message: "invalid type for 'stop' field: float64",
  502. Type: "invalid_request_error",
  503. },
  504. },
  505. },
  506. }
  507. endpoint := func(c *gin.Context) {
  508. c.Status(http.StatusOK)
  509. }
  510. gin.SetMode(gin.TestMode)
  511. router := gin.New()
  512. router.Use(CompletionsMiddleware(), captureRequestMiddleware(&capturedRequest))
  513. router.Handle(http.MethodPost, "/api/generate", endpoint)
  514. for _, tc := range testCases {
  515. t.Run(tc.name, func(t *testing.T) {
  516. req, _ := http.NewRequest(http.MethodPost, "/api/generate", strings.NewReader(tc.body))
  517. req.Header.Set("Content-Type", "application/json")
  518. resp := httptest.NewRecorder()
  519. router.ServeHTTP(resp, req)
  520. var errResp ErrorResponse
  521. if resp.Code != http.StatusOK {
  522. if err := json.Unmarshal(resp.Body.Bytes(), &errResp); err != nil {
  523. t.Fatal(err)
  524. }
  525. }
  526. if capturedRequest != nil {
  527. if diff := cmp.Diff(tc.req, *capturedRequest); diff != "" {
  528. t.Fatalf("requests did not match (-want +got):\n%s", diff)
  529. }
  530. }
  531. if diff := cmp.Diff(tc.err, errResp); diff != "" {
  532. t.Fatalf("errors did not match (-want +got):\n%s", diff)
  533. }
  534. capturedRequest = nil
  535. })
  536. }
  537. }
  538. func TestEmbeddingsMiddleware(t *testing.T) {
  539. type testCase struct {
  540. name string
  541. body string
  542. req api.EmbedRequest
  543. err ErrorResponse
  544. }
  545. var capturedRequest *api.EmbedRequest
  546. testCases := []testCase{
  547. {
  548. name: "embed handler single input",
  549. body: `{
  550. "input": "Hello",
  551. "model": "test-model"
  552. }`,
  553. req: api.EmbedRequest{
  554. Input: "Hello",
  555. Model: "test-model",
  556. },
  557. },
  558. {
  559. name: "embed handler batch input",
  560. body: `{
  561. "input": ["Hello", "World"],
  562. "model": "test-model"
  563. }`,
  564. req: api.EmbedRequest{
  565. Input: []any{"Hello", "World"},
  566. Model: "test-model",
  567. },
  568. },
  569. {
  570. name: "embed handler error forwarding",
  571. body: `{
  572. "model": "test-model"
  573. }`,
  574. err: ErrorResponse{
  575. Error: Error{
  576. Message: "invalid input",
  577. Type: "invalid_request_error",
  578. },
  579. },
  580. },
  581. }
  582. endpoint := func(c *gin.Context) {
  583. c.Status(http.StatusOK)
  584. }
  585. gin.SetMode(gin.TestMode)
  586. router := gin.New()
  587. router.Use(EmbeddingsMiddleware(), captureRequestMiddleware(&capturedRequest))
  588. router.Handle(http.MethodPost, "/api/embed", endpoint)
  589. for _, tc := range testCases {
  590. t.Run(tc.name, func(t *testing.T) {
  591. req, _ := http.NewRequest(http.MethodPost, "/api/embed", strings.NewReader(tc.body))
  592. req.Header.Set("Content-Type", "application/json")
  593. resp := httptest.NewRecorder()
  594. router.ServeHTTP(resp, req)
  595. var errResp ErrorResponse
  596. if resp.Code != http.StatusOK {
  597. if err := json.Unmarshal(resp.Body.Bytes(), &errResp); err != nil {
  598. t.Fatal(err)
  599. }
  600. }
  601. if capturedRequest != nil {
  602. if diff := cmp.Diff(tc.req, *capturedRequest); diff != "" {
  603. t.Fatalf("requests did not match (-want +got):\n%s", diff)
  604. }
  605. }
  606. if diff := cmp.Diff(tc.err, errResp); diff != "" {
  607. t.Fatalf("errors did not match (-want +got):\n%s", diff)
  608. }
  609. capturedRequest = nil
  610. })
  611. }
  612. }
  613. func TestListMiddleware(t *testing.T) {
  614. type testCase struct {
  615. name string
  616. endpoint func(c *gin.Context)
  617. resp string
  618. }
  619. testCases := []testCase{
  620. {
  621. name: "list handler",
  622. endpoint: func(c *gin.Context) {
  623. c.JSON(http.StatusOK, api.ListResponse{
  624. Models: []api.ListModelResponse{
  625. {
  626. Name: "test-model",
  627. ModifiedAt: time.Unix(int64(1686935002), 0).UTC(),
  628. },
  629. },
  630. })
  631. },
  632. resp: `{
  633. "object": "list",
  634. "data": [
  635. {
  636. "id": "test-model",
  637. "object": "model",
  638. "created": 1686935002,
  639. "owned_by": "library"
  640. }
  641. ]
  642. }`,
  643. },
  644. {
  645. name: "list handler empty output",
  646. endpoint: func(c *gin.Context) {
  647. c.JSON(http.StatusOK, api.ListResponse{})
  648. },
  649. resp: `{
  650. "object": "list",
  651. "data": null
  652. }`,
  653. },
  654. }
  655. gin.SetMode(gin.TestMode)
  656. for _, tc := range testCases {
  657. router := gin.New()
  658. router.Use(ListMiddleware())
  659. router.Handle(http.MethodGet, "/api/tags", tc.endpoint)
  660. req, _ := http.NewRequest(http.MethodGet, "/api/tags", nil)
  661. resp := httptest.NewRecorder()
  662. router.ServeHTTP(resp, req)
  663. var expected, actual map[string]any
  664. err := json.Unmarshal([]byte(tc.resp), &expected)
  665. if err != nil {
  666. t.Fatalf("failed to unmarshal expected response: %v", err)
  667. }
  668. err = json.Unmarshal(resp.Body.Bytes(), &actual)
  669. if err != nil {
  670. t.Fatalf("failed to unmarshal actual response: %v", err)
  671. }
  672. if diff := cmp.Diff(expected, actual); diff != "" {
  673. t.Errorf("responses did not match (-want +got):\n%s", diff)
  674. }
  675. }
  676. }
  677. func TestRetrieveMiddleware(t *testing.T) {
  678. type testCase struct {
  679. name string
  680. endpoint func(c *gin.Context)
  681. resp string
  682. }
  683. testCases := []testCase{
  684. {
  685. name: "retrieve handler",
  686. endpoint: func(c *gin.Context) {
  687. c.JSON(http.StatusOK, api.ShowResponse{
  688. ModifiedAt: time.Unix(int64(1686935002), 0).UTC(),
  689. })
  690. },
  691. resp: `{
  692. "id":"test-model",
  693. "object":"model",
  694. "created":1686935002,
  695. "owned_by":"library"}
  696. `,
  697. },
  698. {
  699. name: "retrieve handler error forwarding",
  700. endpoint: func(c *gin.Context) {
  701. c.JSON(http.StatusBadRequest, gin.H{"error": "model not found"})
  702. },
  703. resp: `{
  704. "error": {
  705. "code": null,
  706. "message": "model not found",
  707. "param": null,
  708. "type": "api_error"
  709. }
  710. }`,
  711. },
  712. }
  713. gin.SetMode(gin.TestMode)
  714. for _, tc := range testCases {
  715. router := gin.New()
  716. router.Use(RetrieveMiddleware())
  717. router.Handle(http.MethodGet, "/api/show/:model", tc.endpoint)
  718. req, _ := http.NewRequest(http.MethodGet, "/api/show/test-model", nil)
  719. resp := httptest.NewRecorder()
  720. router.ServeHTTP(resp, req)
  721. var expected, actual map[string]any
  722. err := json.Unmarshal([]byte(tc.resp), &expected)
  723. if err != nil {
  724. t.Fatalf("failed to unmarshal expected response: %v", err)
  725. }
  726. err = json.Unmarshal(resp.Body.Bytes(), &actual)
  727. if err != nil {
  728. t.Fatalf("failed to unmarshal actual response: %v", err)
  729. }
  730. if diff := cmp.Diff(expected, actual); diff != "" {
  731. t.Errorf("responses did not match (-want +got):\n%s", diff)
  732. }
  733. }
  734. }