
ml/backend/ggml: fix debug logging

Michael Yang 2 months ago
parent
commit
a59f665235
3 changed files with 21 additions and 34 deletions
  1. llama/llama.go (+15 -22)
  2. ml/backend/ggml/ggml/src/ggml.go (+6 -11)
  3. runner/llamarunner/runner.go (+0 -1)

+ 15 - 22
llama/llama.go

@@ -37,23 +37,36 @@ COMPILER inline get_compiler() {
 import "C"
 
 import (
+	"context"
 	_ "embed"
 	"errors"
 	"fmt"
+	"log/slog"
 	"os"
 	"runtime"
 	"runtime/cgo"
 	"slices"
 	"strings"
-	"sync/atomic"
 	"unsafe"
 
 	_ "github.com/ollama/ollama/llama/llama.cpp/common"
 	_ "github.com/ollama/ollama/llama/llama.cpp/examples/llava"
 	_ "github.com/ollama/ollama/llama/llama.cpp/src"
-	"github.com/ollama/ollama/ml/backend/ggml/ggml/src"
+	ggml "github.com/ollama/ollama/ml/backend/ggml/ggml/src"
 )
 
+func init() {
+	C.llama_log_set(C.ggml_log_callback(C.llamaLog), nil)
+}
+
+//export llamaLog
+func llamaLog(level C.int, text *C.char, _ unsafe.Pointer) {
+	// slog levels use multiples of 4 with INFO at 0 (DEBUG=-4, WARN=4, ERROR=8)
+	if slog.Default().Enabled(context.TODO(), slog.Level(int(level-C.GGML_LOG_LEVEL_INFO)*4)) {
+		fmt.Fprint(os.Stderr, C.GoString(text))
+	}
+}
+
 func BackendInit() {
 	ggml.OnceLoad()
 	C.llama_backend_init()
@@ -72,26 +85,6 @@ func PrintSystemInfo() string {
 	return C.GoString(C.llama_print_system_info()) + compiler
 }
 
-var logLevel atomic.Int32
-
-func init() {
-	logLevel.Store(int32(C.GGML_LOG_LEVEL_INFO))
-	C.llama_log_set((C.ggml_log_callback)(C.llamaLog), nil)
-}
-
-func EnableDebug() {
-	logLevel.Store(int32(C.GGML_LOG_LEVEL_DEBUG))
-}
-
-//export llamaLog
-func llamaLog(level int32, text *C.char, _ unsafe.Pointer) {
-	if level < logLevel.Load() {
-		return
-	}
-
-	fmt.Fprint(os.Stderr, C.GoString(text))
-}
-
 func GetModelArch(modelPath string) (string, error) {
 	mp := C.CString(modelPath)
 	defer C.free(unsafe.Pointer(mp))

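The callback now defers to the process-wide slog configuration instead of a package-local atomic. Below is a minimal pure-Go sketch of the level translation used in llamaLog; the ggmlLogLevel* constants are hypothetical stand-ins for the cgo C.GGML_LOG_LEVEL_* values, and the arithmetic only assumes that DEBUG, INFO, WARN and ERROR are consecutive integers with INFO landing on slog's zero.

package main

import (
	"context"
	"fmt"
	"log/slog"
)

// Hypothetical stand-ins for C.GGML_LOG_LEVEL_*; the real values come from ggml.h.
const (
	ggmlLogLevelDebug = iota + 1
	ggmlLogLevelInfo
	ggmlLogLevelWarn
	ggmlLogLevelError
)

// toSlogLevel mirrors (level-GGML_LOG_LEVEL_INFO)*4: INFO maps to slog.LevelInfo (0)
// and each neighbouring level sits 4 slog units away.
func toSlogLevel(level int) slog.Level {
	return slog.Level((level - ggmlLogLevelInfo) * 4)
}

func main() {
	for _, l := range []struct {
		name  string
		value int
	}{
		{"DEBUG", ggmlLogLevelDebug},
		{"INFO", ggmlLogLevelInfo},
		{"WARN", ggmlLogLevelWarn},
		{"ERROR", ggmlLogLevelError},
	} {
		// With the stock default handler (INFO), only DEBUG comes back disabled.
		enabled := slog.Default().Enabled(context.TODO(), toSlogLevel(l.value))
		fmt.Printf("%-5s -> slog %2d, printed: %v\n", l.name, toSlogLevel(l.value), enabled)
	}
}
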
+ 6 - 11
ml/backend/ggml/ggml/src/ggml.go

@@ -10,6 +10,8 @@ package ggml
 import "C"
 
 import (
+	"context"
+	"fmt"
 	"log/slog"
 	"os"
 	"path/filepath"
@@ -22,21 +24,14 @@ import (
 )
 
 func init() {
-	C.ggml_log_set((C.ggml_log_callback)(C.sink), nil)
+	C.ggml_log_set(C.ggml_log_callback(C.sink), nil)
 }
 
 //export sink
 func sink(level C.int, text *C.char, _ unsafe.Pointer) {
-	msg := strings.TrimSpace(C.GoString(text))
-	switch level {
-	case C.GGML_LOG_LEVEL_DEBUG:
-		slog.Debug(msg)
-	case C.GGML_LOG_LEVEL_INFO:
-		slog.Info(msg)
-	case C.GGML_LOG_LEVEL_WARN:
-		slog.Warn(msg)
-	case C.GGML_LOG_LEVEL_ERROR:
-		slog.Error(msg)
+	// slog levels use multiples of 4 with INFO at 0 (DEBUG=-4, WARN=4, ERROR=8)
+	if slog.Default().Enabled(context.TODO(), slog.Level(int(level-C.GGML_LOG_LEVEL_INFO)*4)) {
+		fmt.Fprint(os.Stderr, C.GoString(text))
 	}
 }
 

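The previous sink turned every callback invocation into its own slog record; the new one keeps slog only as the level gate and forwards accepted text to stderr verbatim. The sketch below condenses the two behaviours in pure Go; the fragmented delivery in main is an assumption chosen to illustrate the difference, not something taken from the diff.

package main

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"strings"
)

// oldSink condenses the replaced switch: each fragment was trimmed and logged
// as a standalone slog record.
func oldSink(level slog.Level, text string) {
	slog.Log(context.TODO(), level, strings.TrimSpace(text))
}

// newSink mirrors the new body: slog still decides whether the level is
// enabled, but accepted text reaches stderr exactly as received.
func newSink(level slog.Level, text string) {
	if slog.Default().Enabled(context.TODO(), level) {
		fmt.Fprint(os.Stderr, text)
	}
}

func main() {
	// The old path prints two timestamped records for one logical message.
	oldSink(slog.LevelInfo, "loading model ")
	oldSink(slog.LevelInfo, "done\n")

	// The new path reproduces the single line as the C side emitted it.
	newSink(slog.LevelInfo, "loading model ")
	newSink(slog.LevelInfo, "done\n")
}
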
+ 0 - 1
runner/llamarunner/runner.go

@@ -915,7 +915,6 @@ func Execute(args []string) error {
 	level := slog.LevelInfo
 	if *verbose {
 		level = slog.LevelDebug
-		llama.EnableDebug()
 	}
 	handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
 		Level:     level,
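
With llama.EnableDebug gone, the handler level that Execute already configures is the only remaining switch: both C-side callbacks now ask slog.Default().Enabled before printing. A small self-contained sketch of that interaction follows; gate is a hypothetical stand-in for the cgo callbacks.

package main

import (
	"context"
	"log/slog"
	"os"
)

// gate mirrors the check that sink and llamaLog now perform.
func gate(level slog.Level, text string) {
	if slog.Default().Enabled(context.TODO(), level) {
		os.Stderr.WriteString(text)
	}
}

func main() {
	// The default handler sits at INFO, so debug text is dropped.
	gate(slog.LevelDebug, "dropped debug line\n")

	// Installing a debug-level handler, as the runner does when *verbose is
	// set, is enough on its own; no separate EnableDebug call remains.
	slog.SetDefault(slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
		Level: slog.LevelDebug,
	})))
	gate(slog.LevelDebug, "now-visible debug line\n")
}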