Pārlūkot izejas kodu

refac: ollama tool calls

Timothy Jaeryang Baek 2 mēneši atpakaļ
vecāks
revīzija
7766a08b70

+ 14 - 5
backend/open_webui/utils/misc.py

@@ -179,15 +179,24 @@ def openai_chat_message_template(model: str):


 def openai_chat_chunk_message_template(
-    model: str, message: Optional[str] = None, usage: Optional[dict] = None
+    model: str,
+    content: Optional[str] = None,
+    tool_calls: Optional[list[dict]] = None,
+    usage: Optional[dict] = None,
 ) -> dict:
     template = openai_chat_message_template(model)
     template["object"] = "chat.completion.chunk"
-    if message:
-        template["choices"][0]["delta"] = {"content": message}
-    else:
+
+    template["choices"][0]["delta"] = {}
+
+    if content:
+        template["choices"][0]["delta"]["content"] = content
+
+    if tool_calls:
+        template["choices"][0]["delta"]["tool_calls"] = tool_calls
+
+    if not content and not tool_calls:
         template["choices"][0]["finish_reason"] = "stop"
-        template["choices"][0]["delta"] = {}

     if usage:
         template["usage"] = usage

+ 19 - 1
backend/open_webui/utils/response.py

@@ -1,4 +1,5 @@
 import json
+from uuid import uuid4
 from open_webui.utils.misc import (
     openai_chat_chunk_message_template,
     openai_chat_completion_message_template,
@@ -60,6 +61,23 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)

         model = data.get("model", "ollama")
         message_content = data.get("message", {}).get("content", "")
+        tool_calls = data.get("message", {}).get("tool_calls", None)
+        openai_tool_calls = None
+
+        if tool_calls:
+            openai_tool_calls = []
+            for tool_call in tool_calls:
+                openai_tool_call = {
+                    "index": tool_call.get("index", 0),
+                    "id": tool_call.get("id", f"call_{str(uuid4())}"),
+                    "type": "function",
+                    "function": {
+                        "name": tool_call.get("function", {}).get("name", ""),
+                        "arguments": f"{tool_call.get('function', {}).get('arguments', {})}",
+                    },
+                }
+                openai_tool_calls.append(openai_tool_call)
+
         done = data.get("done", False)

         usage = None
@@ -105,7 +123,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
             }

         data = openai_chat_chunk_message_template(
-            model, message_content if not done else None, usage
+            model, message_content if not done else None, openai_tool_calls, usage
         )

         line = f"data: {json.dumps(data)}\n\n"