
ggml-backend: Let GGML allocate context memory

Passing in a Go buffer is not safe because the garbage collector could
free or move the memory while the context is still open. However, if
we pass in the size and a nil pointer, GGML will allocate the buffer
from the C side instead.
Jesse Gross · 3 months ago
commit 01d9a46854
1 changed file with 2 additions and 3 deletions:
  ml/backend/ggml/ggml.go

ml/backend/ggml/ggml.go  +2 −3

@@ -198,10 +198,9 @@ func (b *Backend) Get(name string) ml.Tensor {
 
 
 func (b *Backend) NewContext() ml.Context {
 	nodes := max(8192, len(b.meta.Tensors().Items())*5)
-	bts := make([]byte, C.size_t(nodes)*C.ggml_tensor_overhead()+C.ggml_graph_overhead_custom(C.size_t(nodes), false))
 	c := C.ggml_init(C.struct_ggml_init_params{
-		mem_buffer: unsafe.Pointer(&bts[0]),
-		mem_size:   C.size_t(len(bts)),
+		mem_buffer: nil,
+		mem_size:   C.size_t(nodes)*C.ggml_tensor_overhead() + C.ggml_graph_overhead_custom(C.size_t(nodes), false),
 		no_alloc:   true,
 	})
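
For reference, this is roughly how NewContext reads after the change, pieced together from the hunk above; the cgo preamble, the surrounding file, and how the raw context is wrapped into ml.Context are assumptions and not shown here.

// Sketch of NewContext after this commit (reconstructed from the hunk above).
func (b *Backend) NewContext() ml.Context {
	// Reserve room for at least 8192 graph nodes, or more for models with many tensors.
	nodes := max(8192, len(b.meta.Tensors().Items())*5)

	// Pass a nil mem_buffer with an explicit mem_size so GGML allocates the context
	// metadata itself on the C side, where the Go GC can never free or move it.
	c := C.ggml_init(C.struct_ggml_init_params{
		mem_buffer: nil,
		mem_size:   C.size_t(nodes)*C.ggml_tensor_overhead() + C.ggml_graph_overhead_custom(C.size_t(nodes), false),
		no_alloc:   true, // tensor data is not allocated in this context
	})
	// ... c is then wrapped into the package's ml.Context implementation (not shown).
}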