// ext_server_windows.go
package llm
import (
	"errors"

	"github.com/jmorganca/ollama/api"
)
  5. func newDefaultExtServer(model string, adapters, projectors []string, numLayers int64, opts api.Options) (extServer, error) {
  6. // On windows we always load the llama.cpp libraries dynamically to avoid startup DLL dependencies
  7. // This ensures we can update the PATH at runtime to get everything loaded
  8. return newDynamicShimExtServer(AvailableShims["cpu"], model, adapters, projectors, numLayers, opts)
  9. }