|
@@ -247,6 +247,23 @@ curl http://localhost:11434/api/generate -d '{
|
|
}'
|
|
```
|
|
|
|
|
|
|
|
+#### Request (Reproducible outputs)
|
|
|
|
+
|
|
|
|
+For reproducible outputs, set `temperature` to 0 and `seed` to a number:
|
|
|
|
+
|
|
|
|
+##### Request
|
|
|
|
+
|
|
|
|
+```shell
|
|
|
|
+curl http://localhost:11434/api/generate -d '{
|
|
|
|
+ "model": "mistral",
|
|
|
|
+ "prompt": "[INST] why is the sky blue? [/INST]",
|
|
|
|
+ "options": {
|
|
|
|
+ "seed": 101,
|
|
|
|
+ "temperature": 0
|
|
|
|
+ }
|
|
|
|
+}'
|
|
|
|
+```
|
|
|
|
+
|
|
##### Response
|
|
|
|
|
|
```json
|
|
@@ -570,6 +587,46 @@ curl http://localhost:11434/api/chat -d '{
|
|
}
|
|
```
|
|
|
|
|
|
|
|
+#### Chat request (Reproducible outputs)
|
|
|
|
+
|
|
|
|
+##### Request
|
|
|
|
+
|
|
|
|
+```shell
|
|
|
|
+curl http://localhost:11434/api/chat -d '{
|
|
|
|
+ "model": "llama2",
|
|
|
|
+ "messages": [
|
|
|
|
+ {
|
|
|
|
+ "role": "user",
|
|
|
|
+ "content": "Hello!"
|
|
|
|
+ }
|
|
|
|
+ ],
|
|
|
|
+ "options": {
|
|
|
|
+ "seed": 101,
|
|
|
|
+ "temperature": 0
|
|
|
|
+ }
|
|
|
|
+}'
|
|
|
|
+```
|
|
|
|
+
|
|
|
|
+##### Response
|
|
|
|
+
|
|
|
|
+```json
|
|
|
|
+{
|
|
|
|
+ "model": "registry.ollama.ai/library/llama2:latest",
|
|
|
|
+ "created_at": "2023-12-12T14:13:43.416799Z",
|
|
|
|
+ "message": {
|
|
|
|
+ "role": "assistant",
|
|
|
|
+ "content": "Hello! How are you today?"
|
|
|
|
+ },
|
|
|
|
+ "done": true,
|
|
|
|
+ "total_duration": 5191566416,
|
|
|
|
+ "load_duration": 2154458,
|
|
|
|
+ "prompt_eval_count": 26,
|
|
|
|
+ "prompt_eval_duration": 383809000,
|
|
|
|
+ "eval_count": 298,
|
|
|
|
+ "eval_duration": 4799921000
|
|
|
|
+}
|
|
|
|
+```
|
|
|
|
+
|
|
## Create a Model
|
|
|
|
|
|
```shell
|