@@ -1007,7 +1007,7 @@ async def generate_chat_completion(
        payload["options"] = apply_model_params_to_body_ollama(
            params, payload["options"]
        )
-        payload = apply_model_system_prompt_to_body(params, payload, metadata)
+        payload = apply_model_system_prompt_to_body(params, payload, metadata, user)

    # Check if user has access to the model
    if not bypass_filter and user.role == "user":
@@ -1160,6 +1160,8 @@ async def generate_openai_chat_completion(
    url_idx: Optional[int] = None,
    user=Depends(get_verified_user),
 ):
+    metadata = form_data.pop("metadata", None)
+
    try:
        completion_form = OpenAIChatCompletionForm(**form_data)
    except Exception as e:
@@ -1186,7 +1188,7 @@ async def generate_openai_chat_completion(

    if params:
        payload = apply_model_params_to_body_openai(params, payload)
-        payload = apply_model_system_prompt_to_body(params, payload, user)
+        payload = apply_model_system_prompt_to_body(params, payload, metadata, user)

    # Check if user has access to the model
    if user.role == "user":