@@ -29,6 +29,8 @@ chat_completion = client.chat.completions.create(
)

list_completion = client.models.list()
+
+model = client.models.retrieve("llama3")
```

### OpenAI JavaScript library
@@ -49,6 +51,8 @@ const chatCompletion = await openai.chat.completions.create({
})

const listCompletion = await openai.models.list()
+
+const model = await openai.models.retrieve("llama3")
```

### `curl`
@@ -71,6 +75,8 @@ curl http://localhost:11434/v1/chat/completions \
}'

curl http://localhost:11434/v1/models
+
+curl http://localhost:11434/v1/models/llama3
```

## Endpoints
@@ -115,6 +121,13 @@ curl http://localhost:11434/v1/models
- `created` corresponds to when the model was last modified
- `owned_by` corresponds to the ollama username, defaulting to `"library"`

+### `/v1/models/{model}`
+
+#### Notes
+
+- `created` corresponds to when the model was last modified
+- `owned_by` corresponds to the ollama username, defaulting to `"library"`
+
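+For example, retrieving a model with `curl http://localhost:11434/v1/models/llama3` might return a response shaped like the following (an illustrative sketch following the OpenAI model object; the `created` timestamp is a placeholder and the exact values depend on the local model):
+
+```json
+{
+  "id": "llama3",
+  "object": "model",
+  "created": 1686935002,
+  "owned_by": "library"
+}
+```
+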
## Models

Before using a model, pull it locally with `ollama pull`: