瀏覽代碼

fix: arena model exclude filter

Timothy J. Baek 6 月之前
父節點
當前提交
856c00bc2f
共有 2 個文件被更改,包括 5 次插入、2 次刪除
  1. 2 1
      backend/open_webui/apps/ollama/main.py
  2. 3 1
      backend/open_webui/main.py

+ 2 - 1
backend/open_webui/apps/ollama/main.py

@@ -761,6 +761,7 @@ async def generate_chat_completion(
     form_data: GenerateChatCompletionForm,
     url_idx: Optional[int] = None,
     user=Depends(get_verified_user),
+    bypass_filter: Optional[bool] = False,
 ):
     payload = {**form_data.model_dump(exclude_none=True)}
     log.debug(f"generate_chat_completion() - 1.payload = {payload}")
@@ -769,7 +770,7 @@ async def generate_chat_completion(
 
     model_id = form_data.model
 
-    if app.state.config.ENABLE_MODEL_FILTER:
+    if not bypass_filter and app.state.config.ENABLE_MODEL_FILTER:
         if user.role == "user" and model_id not in app.state.config.MODEL_FILTER_LIST:
             raise HTTPException(
                 status_code=403,

+ 3 - 1
backend/open_webui/main.py

@@ -1154,7 +1154,9 @@ async def generate_chat_completions(
         # Using /ollama/api/chat endpoint
         form_data = convert_payload_openai_to_ollama(form_data)
         form_data = GenerateChatCompletionForm(**form_data)
-        response = await generate_ollama_chat_completion(form_data=form_data, user=user)
+        response = await generate_ollama_chat_completion(
+            form_data=form_data, user=user, bypass_filter=True
+        )
         if form_data.stream:
             response.headers["content-type"] = "text/event-stream"
             return StreamingResponse(