@@ -27,6 +27,8 @@ chat_completion = client.chat.completions.create(
         }
     ],
     model='llama3',
 )
+
+list_completion = client.models.list()
 ```

 ### OpenAI JavaScript library

@@ -45,6 +47,8 @@ const chatCompletion = await openai.chat.completions.create({
   messages: [{ role: 'user', content: 'Say this is a test' }],
   model: 'llama3',
 })
+
+const listCompletion = await openai.models.list()
 ```

 ### `curl`

@@ -66,6 +70,7 @@ curl http://localhost:11434/v1/chat/completions \
         ]
     }'
+
+curl http://localhost:11434/v1/models
 ```

 ## Endpoints

@@ -103,6 +108,13 @@ curl http://localhost:11434/v1/chat/completions \
 - [ ] `user`
 - [ ] `n`

+### `/v1/models`
+
+#### Notes
+
+- `created` corresponds to when the model was last modified
+- `owned_by` corresponds to the ollama username, defaulting to `"library"`
+
## Models
|
|
## Models
|
|
|
|
|
|
Before using a model, pull it locally `ollama pull`:
|
|
Before using a model, pull it locally `ollama pull`:
|