@@ -317,8 +317,22 @@ func NewLlamaServer(gpus gpu.GpuInfoList, model string, ggml *GGML, adapters, pr
 	}
 
 	slog.Info("starting llama server", "cmd", s.cmd.String())
-	// Log at debug as the environment is inherited and might contain sensitive information
-	slog.Debug("subprocess", "environment", s.cmd.Env)
+	if envconfig.Debug {
+		filteredEnv := []string{}
+		for _, ev := range s.cmd.Env {
+			if strings.HasPrefix(ev, "CUDA_") ||
+				strings.HasPrefix(ev, "ROCM_") ||
+				strings.HasPrefix(ev, "HIP_") ||
+				strings.HasPrefix(ev, "HSA_") ||
+				strings.HasPrefix(ev, "GGML_") ||
+				strings.HasPrefix(ev, "PATH=") ||
+				strings.HasPrefix(ev, "LD_LIBRARY_PATH=") {
+				filteredEnv = append(filteredEnv, ev)
+			}
+		}
+		// Log at debug as the environment is inherited and might contain sensitive information
+		slog.Debug("subprocess", "environment", filteredEnv)
+	}
 
 	if err = s.cmd.Start(); err != nil {
 		// Detect permission denied and augment the message about noexec
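
For reviewers, the change reads as a small allow-list over `s.cmd.Env` (entries of the form `KEY=value`), built and logged only when `envconfig.Debug` is set. Below is a minimal standalone sketch of the same idea; the `filterEnv` helper name and the sample values are hypothetical, not part of the diff.

```go
package main

import (
	"fmt"
	"strings"
)

// filterEnv mirrors the allow-list in the diff: keep GPU/runtime-related
// variable families plus the two exact keys PATH and LD_LIBRARY_PATH.
// The trailing "=" on those two means e.g. PATH_FOO is not matched.
func filterEnv(env []string) []string {
	allowed := []string{"CUDA_", "ROCM_", "HIP_", "HSA_", "GGML_", "PATH=", "LD_LIBRARY_PATH="}
	filtered := []string{}
	for _, ev := range env {
		for _, prefix := range allowed {
			if strings.HasPrefix(ev, prefix) {
				filtered = append(filtered, ev)
				break
			}
		}
	}
	return filtered
}

func main() {
	env := []string{
		"CUDA_VISIBLE_DEVICES=0",
		"PATH=/usr/bin",
		"AWS_SECRET_ACCESS_KEY=do-not-log", // dropped: not on the allow-list
	}
	fmt.Println(filterEnv(env)) // [CUDA_VISIBLE_DEVICES=0 PATH=/usr/bin]
}
```

The allow-list approach means nothing has to enumerate which inherited variables are sensitive: only GPU/runtime families (e.g. `CUDA_VISIBLE_DEVICES`, `HSA_OVERRIDE_GFX_VERSION`) and the loader paths ever reach the debug log.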