Browse Source

Merge branch 'dev' of https://github.com/open-webui/open-webui into dev

Timothy Jaeryang Baek 2 months ago
parent
commit
c28d82b10d
2 changed files with 32 additions and 16 deletions
  1. 8 1
      backend/open_webui/utils/misc.py
  2. 24 15
      backend/open_webui/utils/response.py

+ 8 - 1
backend/open_webui/utils/misc.py

@@ -217,12 +217,19 @@ def openai_chat_chunk_message_template(
 
 
 def openai_chat_completion_message_template(
-    model: str, message: Optional[str] = None, usage: Optional[dict] = None
+    model: str, 
+    message: Optional[str] = None,
+    tool_calls: Optional[list[dict]] = None,
+    usage: Optional[dict] = None
 ) -> dict:
     template = openai_chat_message_template(model)
     template["object"] = "chat.completion"
     if message is not None:
         template["choices"][0]["message"] = {"content": message, "role": "assistant"}
+
+    if tool_calls:
+        template["choices"][0]["tool_calls"] = tool_calls
+
     template["choices"][0]["finish_reason"] = "stop"
 
     if usage:

+ 24 - 15
backend/open_webui/utils/response.py

@@ -6,9 +6,31 @@ from open_webui.utils.misc import (
 )
 
 
def convert_ollama_tool_call_to_openai(tool_calls: list[dict]) -> list[dict]:
    """Convert Ollama-style tool calls into the OpenAI tool_call format.

    Args:
        tool_calls: List of Ollama tool-call dicts, each optionally carrying
            ``index``, ``id`` and a ``function`` dict with ``name`` and
            ``arguments`` (arguments as a dict).

    Returns:
        List of OpenAI-format tool-call dicts, with ``arguments``
        JSON-encoded as a string (OpenAI's wire format).
    """
    openai_tool_calls = []
    for tool_call in tool_calls:
        function = tool_call.get("function", {})
        openai_tool_call = {
            "index": tool_call.get("index", 0),
            # Ollama tool calls may lack an id; synthesize an OpenAI-style one.
            "id": tool_call.get("id", f"call_{str(uuid4())}"),
            "type": "function",
            "function": {
                "name": function.get("name", ""),
                # OpenAI expects arguments as a JSON string; Ollama gives a dict.
                "arguments": json.dumps(function.get("arguments", {})),
            },
        }
        openai_tool_calls.append(openai_tool_call)
    return openai_tool_calls
+
 def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
     model = ollama_response.get("model", "ollama")
     message_content = ollama_response.get("message", {}).get("content", "")
+    tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
+    openai_tool_calls = None
+
+    if tool_calls:
+        openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
 
     data = ollama_response
     usage = {
@@ -51,7 +73,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
         ),
     }
 
-    response = openai_chat_completion_message_template(model, message_content, usage)
+    response = openai_chat_completion_message_template(model, message_content, openai_tool_calls, usage)
     return response
 
 
@@ -65,20 +87,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
         openai_tool_calls = None
 
         if tool_calls:
-            openai_tool_calls = []
-            for tool_call in tool_calls:
-                openai_tool_call = {
-                    "index": tool_call.get("index", 0),
-                    "id": tool_call.get("id", f"call_{str(uuid4())}"),
-                    "type": "function",
-                    "function": {
-                        "name": tool_call.get("function", {}).get("name", ""),
-                        "arguments": json.dumps(
-                            tool_call.get("function", {}).get("arguments", {})
-                        ),
-                    },
-                }
-                openai_tool_calls.append(openai_tool_call)
+            openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
 
         done = data.get("done", False)