Roy Han committed 10 months ago
commit 6caac01494

Changed 2 files with 0 additions and 5 deletions
  1. llm/server.go (0 additions, 1 deletion)
  2. server/sched_test.go (0 additions, 4 deletions)

+ 0 - 1
llm/server.go

@@ -33,7 +33,6 @@ type LlamaServer interface {
 	Ping(ctx context.Context) error
 	WaitUntilRunning(ctx context.Context) error
 	Completion(ctx context.Context, req CompletionRequest, fn func(CompletionResponse)) error
-	// Embedding(ctx context.Context, prompt string) ([]float64, error)
 	Embed(ctx context.Context, input []string) ([][]float32, error)
 	Tokenize(ctx context.Context, content string) ([]int, error)
 	Detokenize(ctx context.Context, tokens []int) (string, error)
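
With the commented-out Embedding signature removed, Embed is the only embedding entry point left on the LlamaServer interface. A minimal sketch of how a caller might use it (the embedAll helper and the import path are illustrative assumptions, not part of this commit):

package example

import (
	"context"
	"fmt"

	"github.com/ollama/ollama/llm" // import path assumed for illustration
)

// embedAll sends a batch of inputs to the server and reports the size of
// each returned embedding vector; Embed returns one []float32 per input.
func embedAll(ctx context.Context, s llm.LlamaServer, inputs []string) error {
	vectors, err := s.Embed(ctx, inputs)
	if err != nil {
		return err
	}
	for i, v := range vectors {
		fmt.Printf("input %d -> embedding of length %d\n", i, len(v))
	}
	return nil
}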

+ 0 - 4
server/sched_test.go

@@ -648,10 +648,6 @@ func (s *mockLlm) WaitUntilRunning(ctx context.Context) error { return s.waitRes
 func (s *mockLlm) Completion(ctx context.Context, req llm.CompletionRequest, fn func(llm.CompletionResponse)) error {
 	return s.completionResp
 }
-
-//	func (s *mockLlm) Embedding(ctx context.Context, prompt string) ([]float64, error) {
-//		return s.embeddingResp, s.embeddingRespErr
-//	}
 func (s *mockLlm) Embed(ctx context.Context, input []string) ([][]float32, error) {
 	return s.embedResp, s.embedRespErr
 }
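
The mock now mirrors the interface one-to-one. A hedged sketch of how a test in the same file might drive it (the embedResp and embedRespErr field names come from the hunk above; the test function itself is hypothetical):

func TestMockLlmEmbed(t *testing.T) {
	s := &mockLlm{
		// Canned values the mock hands back, matching the fields used above.
		embedResp:    [][]float32{{0.1, 0.2, 0.3}},
		embedRespErr: nil,
	}

	got, err := s.Embed(context.Background(), []string{"hello"})
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(got) != 1 || len(got[0]) != 3 {
		t.Fatalf("unexpected embedding shape: %v", got)
	}
}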