Browse Source

Handle models with divergent layer sizes

The recent refactoring of the memory prediction assumed all layers
are the same size, but for some models (like deepseek-coder-v2) this
is not the case, so our predictions were significantly off.
Daniel Hiltgen 10 months ago
parent
commit
359b15a597
1 changed file with 6 additions and 0 deletions
  1. 6 0
      llm/memory.go

+ 6 - 0
llm/memory.go

@@ -1,6 +1,7 @@
 package llm
 
 import (
+	"fmt"
 	"log/slog"
 	"strconv"
 	"strings"
@@ -179,6 +180,11 @@ func EstimateGPULayers(gpus []gpu.GpuInfo, ggml *GGML, projectors []string, opts
 
 	// For all the layers, find where they can fit on the GPU(s)
 	for i := range int(ggml.KV().BlockCount()) {
+		// Some models have inconsistent layer sizes
+		if blk, ok := layers[fmt.Sprintf("blk.%d", i)]; ok {
+			layerSize = blk.size()
+			layerSize += kv / ggml.KV().BlockCount()
+		}
 		memoryWeights += layerSize
 
 		if opts.NumGPU >= 0 && layerCount >= opts.NumGPU {