
Merge branch 'jmorganca:main' into main

Dane Madsen 1 year ago
parent
commit
779e196ef6
2 changed files with 31 additions and 51 deletions
  1. cmd/cmd.go: 30 additions, 50 deletions
  2. docs/api.md: 1 addition, 1 deletion

+ 30 - 50
cmd/cmd.go

@@ -1,7 +1,6 @@
 package cmd
 
 import (
-	"bufio"
 	"context"
 	"crypto/ed25519"
 	"crypto/rand"
@@ -350,34 +349,44 @@ func pull(model string, insecure bool) error {
 }
 
 func RunGenerate(cmd *cobra.Command, args []string) error {
-	if len(args) > 1 {
-		// join all args into a single prompt
-		wordWrap := false
-		if term.IsTerminal(int(os.Stdout.Fd())) {
-			wordWrap = true
-		}
+	format, err := cmd.Flags().GetString("format")
+	if err != nil {
+		return err
+	}
 
-		nowrap, err := cmd.Flags().GetBool("nowordwrap")
-		if err != nil {
-			return err
-		}
-		if nowrap {
-			wordWrap = false
-		}
+	prompts := args[1:]
 
-		format, err := cmd.Flags().GetString("format")
+	// prepend stdin to the prompt if provided
+	if !term.IsTerminal(int(os.Stdin.Fd())) {
+		in, err := io.ReadAll(os.Stdin)
 		if err != nil {
 			return err
 		}
 
-		return generate(cmd, args[0], strings.Join(args[1:], " "), wordWrap, format)
+		prompts = append([]string{string(in)}, prompts...)
+	}
+
+	// output is being piped
+	if !term.IsTerminal(int(os.Stdout.Fd())) {
+		return generate(cmd, args[0], strings.Join(prompts, " "), false, format)
+	}
+
+	wordWrap := os.Getenv("TERM") == "xterm-256color"
+
+	nowrap, err := cmd.Flags().GetBool("nowordwrap")
+	if err != nil {
+		return err
+	}
+	if nowrap {
+		wordWrap = false
 	}
 
-	if readline.IsTerminal(int(os.Stdin.Fd())) {
-		return generateInteractive(cmd, args[0])
+	// prompts are provided via stdin or args so don't enter interactive mode
+	if len(prompts) > 0 {
+		return generate(cmd, args[0], strings.Join(prompts, " "), wordWrap, format)
 	}
 
-	return generateBatch(cmd, args[0])
+	return generateInteractive(cmd, args[0], wordWrap, format)
 }
 
 type generateContextKey string
@@ -398,7 +407,7 @@ func generate(cmd *cobra.Command, model, prompt string, wordWrap bool, format st
 		generateContext = []int{}
 	}
 
-	termWidth, _, err := term.GetSize(int(0))
+	termWidth, _, err := term.GetSize(int(os.Stdout.Fd()))
 	if err != nil {
 		wordWrap = false
 	}
@@ -490,7 +499,7 @@ func generate(cmd *cobra.Command, model, prompt string, wordWrap bool, format st
 	return nil
 }
 
-func generateInteractive(cmd *cobra.Command, model string) error {
+func generateInteractive(cmd *cobra.Command, model string, wordWrap bool, format string) error {
 	// load the model
 	if err := generate(cmd, model, "", false, ""); err != nil {
 		return err
@@ -542,22 +551,6 @@ func generateInteractive(cmd *cobra.Command, model string) error {
 		return err
 	}
 
-	var format string
-	var wordWrap bool
-	termType := os.Getenv("TERM")
-	if termType == "xterm-256color" {
-		wordWrap = true
-	}
-
-	// override wrapping if the user turned it off
-	nowrap, err := cmd.Flags().GetBool("nowordwrap")
-	if err != nil {
-		return err
-	}
-	if nowrap {
-		wordWrap = false
-	}
-
 	fmt.Print(readline.StartBracketedPaste)
 	defer fmt.Printf(readline.EndBracketedPaste)
 
@@ -711,19 +704,6 @@ func generateInteractive(cmd *cobra.Command, model string) error {
 	}
 }
 
-func generateBatch(cmd *cobra.Command, model string) error {
-	scanner := bufio.NewScanner(os.Stdin)
-	for scanner.Scan() {
-		prompt := scanner.Text()
-		fmt.Printf(">>> %s\n", prompt)
-		if err := generate(cmd, model, prompt, false, ""); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
 func RunServer(cmd *cobra.Command, _ []string) error {
 	host, port, err := net.SplitHostPort(os.Getenv("OLLAMA_HOST"))
 	if err != nil {

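The rewritten RunGenerate above replaces the old generateBatch path with two terminal checks: if stdin is not a TTY, its contents are read and prepended to the prompt; if stdout is not a TTY, word wrap is disabled and the output is emitted plainly. Below is a minimal standalone sketch of that detection pattern, assuming the golang.org/x/term package the diff relies on; the main function and variable names are illustrative only, not the repository's own code.

package main

import (
	"fmt"
	"io"
	"os"
	"strings"

	"golang.org/x/term"
)

func main() {
	// Treat all command-line arguments as prompt fragments.
	prompts := os.Args[1:]

	// stdin is a pipe or a file rather than an interactive terminal:
	// read everything and prepend it to the prompt, mirroring the diff above.
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		prompts = append([]string{string(in)}, prompts...)
	}

	// stdout is piped: skip word wrapping, as the non-terminal branch does.
	wordWrap := term.IsTerminal(int(os.Stdout.Fd()))

	fmt.Printf("prompt: %q wordWrap: %v\n", strings.Join(prompts, " "), wordWrap)
}

The term.GetSize change further down follows the same idea: it measures the width of stdout, where the output actually goes, rather than file descriptor 0.
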
+ 1 - 1
docs/api.md

@@ -38,10 +38,10 @@ Generate a response for a given prompt with a provided model. This is a streamin
 
 - `model`: (required) the [model name](#model-names)
 - `prompt`: the prompt to generate a response for
-- `format`: the format to return a response in. Currently the only accepted value is `json`
 
 Advanced parameters (optional):
 
+- `format`: the format to return a response in. Currently the only accepted value is `json`
 - `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature`
 - `system`: system prompt (overrides what is defined in the `Modelfile`)
 - `template`: the full prompt or prompt template (overrides what is defined in the `Modelfile`)
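The docs/api.md hunk only moves the `format` parameter into the advanced list; its behaviour is unchanged, and `json` remains the only accepted value. Below is a small sketch of a request that sets it, assuming a local server on the default 127.0.0.1:11434 address and a model named "llama2" (both assumptions, not part of this change).

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// The model name and server address are assumptions for illustration.
	body, err := json.Marshal(map[string]any{
		"model":  "llama2",
		"prompt": "Describe today's weather in Paris as a JSON object.",
		"format": "json", // currently the only accepted value
	})
	if err != nil {
		panic(err)
	}

	resp, err := http.Post("http://127.0.0.1:11434/api/generate", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The endpoint streams one JSON object per line; print them as-is.
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out))
}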