
add verbose mode to the show command (#9640)

Add metadata and tensor information to the show command so that more information about a model can be inspected. This outputs the same data as shown on the model details page on ollama.com (see the client-side sketch after the changed-files list below).
Patrick Devine · 1 month ago
commit 4bed739259
6 changed files with 130 additions and 19 deletions
  1. api/types.go (+8 -0)
  2. cmd/cmd.go (+46 -4)
  3. cmd/cmd_test.go (+56 -6)
  4. cmd/interactive.go (+1 -1)
  5. fs/ggml/ggml.go (+4 -0)
  6. server/routes.go (+15 -8)
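
A minimal client-side sketch of what this change exposes through the Go API, assuming the api package from this repository (github.com/ollama/ollama/api) and a locally running server; the model name is a placeholder, and this snippet is illustrative rather than part of the commit.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	// Connect to the local ollama server using the standard environment
	// configuration (OLLAMA_HOST, or the default local address).
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Verbose requests the full metadata arrays from the server and, with
	// this commit, per-tensor information as well.
	resp, err := client.Show(context.Background(), &api.ShowRequest{
		Name:    "llama3.2", // placeholder model name
		Verbose: true,
	})
	if err != nil {
		log.Fatal(err)
	}

	// ModelInfo carries the GGUF key/value metadata; Tensors is the new
	// field added by this commit.
	for k, v := range resp.ModelInfo {
		fmt.Printf("%s: %v\n", k, v)
	}
	for _, t := range resp.Tensors {
		fmt.Printf("%s %s %v\n", t.Name, t.Type, t.Shape)
	}
}
```

From the CLI, the same information is printed by `ollama show -v MODEL`, using the new `--verbose` flag registered in cmd/cmd.go below.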

+ 8 - 0
api/types.go

@@ -349,6 +349,7 @@ type ShowResponse struct {
 	Messages      []Message      `json:"messages,omitempty"`
 	ModelInfo     map[string]any `json:"model_info,omitempty"`
 	ProjectorInfo map[string]any `json:"projector_info,omitempty"`
+	Tensors       []Tensor       `json:"tensors,omitempty"`
 	ModifiedAt    time.Time      `json:"modified_at,omitempty"`
 }
 
@@ -467,6 +468,13 @@ type ModelDetails struct {
 	QuantizationLevel string   `json:"quantization_level"`
 }
 
+// Tensor describes the metadata for a given tensor.
+type Tensor struct {
+	Name  string   `json:"name"`
+	Type  string   `json:"type"`
+	Shape []uint64 `json:"shape"`
+}
+
 func (m *Metrics) Summary() {
 	if m.TotalDuration > 0 {
 		fmt.Fprintf(os.Stderr, "total duration:       %v\n", m.TotalDuration)

+ 46 - 4
cmd/cmd.go

@@ -18,6 +18,7 @@ import (
 	"os/signal"
 	"path/filepath"
 	"runtime"
+	"sort"
 	"strconv"
 	"strings"
 	"sync/atomic"
@@ -568,8 +569,9 @@ func ShowHandler(cmd *cobra.Command, args []string) error {
 	parameters, errParams := cmd.Flags().GetBool("parameters")
 	system, errSystem := cmd.Flags().GetBool("system")
 	template, errTemplate := cmd.Flags().GetBool("template")
+	verbose, errVerbose := cmd.Flags().GetBool("verbose")
 
-	for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate} {
+	for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate, errVerbose} {
 		if boolErr != nil {
 			return errors.New("error retrieving flags")
 		}
@@ -607,7 +609,7 @@ func ShowHandler(cmd *cobra.Command, args []string) error {
 		return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified")
 	}
 
-	req := api.ShowRequest{Name: args[0]}
+	req := api.ShowRequest{Name: args[0], Verbose: verbose}
 	resp, err := client.Show(cmd.Context(), &req)
 	if err != nil {
 		return err
@@ -630,10 +632,10 @@ func ShowHandler(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
-	return showInfo(resp, os.Stdout)
+	return showInfo(resp, verbose, os.Stdout)
 }
 
-func showInfo(resp *api.ShowResponse, w io.Writer) error {
+func showInfo(resp *api.ShowResponse, verbose bool, w io.Writer) error {
 	tableRender := func(header string, rows func() [][]string) {
 		fmt.Fprintln(w, " ", header)
 		table := tablewriter.NewWriter(w)
@@ -690,6 +692,45 @@ func showInfo(resp *api.ShowResponse, w io.Writer) error {
 		})
 	}
 
+	if resp.ModelInfo != nil && verbose {
+		tableRender("Metadata", func() (rows [][]string) {
+			keys := make([]string, 0, len(resp.ModelInfo))
+			for k := range resp.ModelInfo {
+				keys = append(keys, k)
+			}
+			sort.Strings(keys)
+
+			for _, k := range keys {
+				var v string
+				switch vData := resp.ModelInfo[k].(type) {
+				case string:
+					v = vData
+				case float64:
+					v = fmt.Sprintf("%g", vData)
+				case []any:
+					n := 3
+					if len(vData) < n {
+						n = len(vData)
+					}
+					v = fmt.Sprintf("%v", vData[:n])
+				default:
+					v = fmt.Sprintf("%T", vData)
+				}
+				rows = append(rows, []string{"", k, v})
+			}
+			return
+		})
+	}
+
+	if len(resp.Tensors) > 0 && verbose {
+		tableRender("Tensors", func() (rows [][]string) {
+			for _, t := range resp.Tensors {
+				rows = append(rows, []string{"", t.Name, t.Type, fmt.Sprint(t.Shape)})
+			}
+			return
+		})
+	}
+
 	head := func(s string, n int) (rows [][]string) {
 		scanner := bufio.NewScanner(strings.NewReader(s))
 		for scanner.Scan() && (len(rows) < n || n < 0) {
@@ -1196,6 +1237,7 @@ func NewCLI() *cobra.Command {
 	showCmd.Flags().Bool("parameters", false, "Show parameters of a model")
 	showCmd.Flags().Bool("template", false, "Show template of a model")
 	showCmd.Flags().Bool("system", false, "Show system message of a model")
+	showCmd.Flags().BoolP("verbose", "v", false, "Show detailed model information")
 
 	runCmd := &cobra.Command{
 		Use:     "run MODEL [PROMPT]",

+ 56 - 6
cmd/cmd_test.go

@@ -27,7 +27,7 @@ func TestShowInfo(t *testing.T) {
 				ParameterSize:     "7B",
 				QuantizationLevel: "FP16",
 			},
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 
@@ -57,7 +57,7 @@ func TestShowInfo(t *testing.T) {
 				ParameterSize:     "7B",
 				QuantizationLevel: "FP16",
 			},
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 
@@ -68,6 +68,56 @@ func TestShowInfo(t *testing.T) {
     embedding length    0       
     quantization        FP16    
 
+`
+		if diff := cmp.Diff(expect, b.String()); diff != "" {
+			t.Errorf("unexpected output (-want +got):\n%s", diff)
+		}
+	})
+
+	t.Run("verbose model", func(t *testing.T) {
+		var b bytes.Buffer
+		if err := showInfo(&api.ShowResponse{
+			Details: api.ModelDetails{
+				Family:            "test",
+				ParameterSize:     "8B",
+				QuantizationLevel: "FP16",
+			},
+			Parameters: `
+			stop up`,
+			ModelInfo: map[string]any{
+				"general.architecture":    "test",
+				"general.parameter_count": float64(8_000_000_000),
+				"test.context_length":     float64(1000),
+				"test.embedding_length":   float64(11434),
+			},
+			Tensors: []api.Tensor{
+				{Name: "blk.0.attn_k.weight", Type: "BF16", Shape: []uint64{42, 3117}},
+				{Name: "blk.0.attn_q.weight", Type: "FP16", Shape: []uint64{3117, 42}},
+			},
+		}, true, &b); err != nil {
+			t.Fatal(err)
+		}
+
+		expect := `  Model
+    architecture        test     
+    parameters          8B       
+    context length      1000     
+    embedding length    11434    
+    quantization        FP16     
+
+  Parameters
+    stop    up    
+
+  Metadata
+    general.architecture       test     
+    general.parameter_count    8e+09    
+    test.context_length        1000     
+    test.embedding_length      11434    
+
+  Tensors
+    blk.0.attn_k.weight    BF16    [42 3117]    
+    blk.0.attn_q.weight    FP16    [3117 42]    
+
 `
 		if diff := cmp.Diff(expect, b.String()); diff != "" {
 			t.Errorf("unexpected output (-want +got):\n%s", diff)
@@ -89,7 +139,7 @@ func TestShowInfo(t *testing.T) {
 			stop you
 			stop up
 			temperature 99`,
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 
@@ -126,7 +176,7 @@ func TestShowInfo(t *testing.T) {
 				"clip.vision.embedding_length": float64(0),
 				"clip.vision.projection_dim":   float64(0),
 			},
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 
@@ -159,7 +209,7 @@ func TestShowInfo(t *testing.T) {
 Ahoy, matey!
 Weigh anchor!
 			`,
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 
@@ -188,7 +238,7 @@ Weigh anchor!
 				QuantizationLevel: "FP16",
 			},
 			License: license,
-		}, &b); err != nil {
+		}, false, &b); err != nil {
 			t.Fatal(err)
 		}
 

+ 1 - 1
cmd/interactive.go

@@ -347,7 +347,7 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
 
 				switch args[1] {
 				case "info":
-					_ = showInfo(resp, os.Stderr)
+					_ = showInfo(resp, false, os.Stderr)
 				case "license":
 					if resp.License == "" {
 						fmt.Println("No license was specified for this model.")

+ 4 - 0
fs/ggml/ggml.go

@@ -327,6 +327,10 @@ func (t Tensor) Size() uint64 {
 	return t.parameters() * t.typeSize() / t.blockSize()
 }
 
+func (t Tensor) Type() string {
+	return fileType(t.Kind).String()
+}
+
 type container interface {
 	Name() string
 	Decode(io.ReadSeeker) (model, error)

+ 15 - 8
server/routes.go

@@ -435,7 +435,7 @@ func (s *Server) EmbedHandler(c *gin.Context) {
 		return
 	}
 
-	kvData, err := getKVData(m.ModelPath, false)
+	kvData, _, err := getModelData(m.ModelPath, false)
 	if err != nil {
 		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
 		return
@@ -848,16 +848,23 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
 	fmt.Fprint(&sb, m.String())
 	resp.Modelfile = sb.String()
 
-	kvData, err := getKVData(m.ModelPath, req.Verbose)
+	kvData, tensors, err := getModelData(m.ModelPath, req.Verbose)
 	if err != nil {
 		return nil, err
 	}
+
 	delete(kvData, "general.name")
 	delete(kvData, "tokenizer.chat_template")
 	resp.ModelInfo = kvData
 
+	tensorData := make([]api.Tensor, len(tensors.Items()))
+	for cnt, t := range tensors.Items() {
+		tensorData[cnt] = api.Tensor{Name: t.Name, Type: t.Type(), Shape: t.Shape}
+	}
+	resp.Tensors = tensorData
+
 	if len(m.ProjectorPaths) > 0 {
-		projectorData, err := getKVData(m.ProjectorPaths[0], req.Verbose)
+		projectorData, _, err := getModelData(m.ProjectorPaths[0], req.Verbose)
 		if err != nil {
 			return nil, err
 		}
@@ -867,17 +874,17 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
 	return resp, nil
 }
 
-func getKVData(digest string, verbose bool) (ggml.KV, error) {
+func getModelData(digest string, verbose bool) (ggml.KV, ggml.Tensors, error) {
 	maxArraySize := 0
 	if verbose {
 		maxArraySize = -1
 	}
-	kvData, err := llm.LoadModel(digest, maxArraySize)
+	data, err := llm.LoadModel(digest, maxArraySize)
 	if err != nil {
-		return nil, err
+		return nil, ggml.Tensors{}, err
 	}
 
-	kv := kvData.KV()
+	kv := data.KV()
 
 	if !verbose {
 		for k := range kv {
@@ -887,7 +894,7 @@ func getKVData(digest string, verbose bool) (ggml.KV, error) {
 		}
 	}
 
-	return kv, nil
+	return kv, data.Tensors(), nil
 }
 
 func (s *Server) ListHandler(c *gin.Context) {