Timothy Jaeryang Baek, 2 months ago
Commit 2b7f9d14d0
2 files changed, 65 insertions(+), 64 deletions(-)
  1. backend/open_webui/utils/chat.py  (+53, -44)
  2. src/routes/+layout.svelte  (+12, -20)

backend/open_webui/utils/chat.py  (+53, -44)

@@ -10,8 +10,8 @@ import inspect
 import uuid
 import asyncio
 
-from fastapi import Request
-from starlette.responses import Response, StreamingResponse
+from fastapi import Request, status
+from starlette.responses import Response, StreamingResponse, JSONResponse
 
 
 from open_webui.models.users import UserModel
@@ -82,50 +82,61 @@ async def generate_direct_chat_completion(
     if form_data.get("stream"):
         q = asyncio.Queue()
 
-        # Define a generator to stream responses
-        async def event_generator():
-            nonlocal q
-
-            async def message_listener(sid, data):
-                """
-                Handle received socket messages and push them into the queue.
-                """
-                await q.put(data)
-
-            # Register the listener
-            sio.on(channel, message_listener)
-
-            # Start processing chat completion in background
-            await event_emitter(
-                {
-                    "type": "request:chat:completion",
-                    "data": {
-                        "form_data": form_data,
-                        "model": models[form_data["model"]],
-                        "channel": channel,
-                        "session_id": session_id,
-                    },
-                }
-            )
+        async def message_listener(sid, data):
+            """
+            Handle received socket messages and push them into the queue.
+            """
+            await q.put(data)
+
+        # Register the listener
+        sio.on(channel, message_listener)
+
+        # Start processing chat completion in background
+        res = await event_caller(
+            {
+                "type": "request:chat:completion",
+                "data": {
+                    "form_data": form_data,
+                    "model": models[form_data["model"]],
+                    "channel": channel,
+                    "session_id": session_id,
+                },
+            }
+        )
 
-            try:
-                while True:
-                    data = await q.get()  # Wait for new messages
-                    if isinstance(data, dict):
-                        if "error" in data:
-                            raise Exception(data["error"])
+        print("res", res)
 
-                        if "done" in data and data["done"]:
-                            break  # Stop streaming when 'done' is received
+        if res.get("status", False):
+            # Define a generator to stream responses
+            async def event_generator():
+                nonlocal q
+                try:
+                    while True:
+                        data = await q.get()  # Wait for new messages
+                        if isinstance(data, dict):
+                            if "done" in data and data["done"]:
+                                break  # Stop streaming when 'done' is received
+
+                            yield f"data: {json.dumps(data)}\n\n"
+                        elif isinstance(data, str):
+                            yield data
+                except Exception as e:
+                    log.debug(f"Error in event generator: {e}")
+                    pass
 
-                        yield f"data: {json.dumps(data)}\n\n"
-                    elif isinstance(data, str):
-                        yield data
-            finally:
-                del sio.handlers["/"][channel]  # Remove the listener
+            # Define a background task to run the event generator
+            async def background():
+                try:
+                    del sio.handlers["/"][channel]
+                except Exception as e:
+                    pass
 
-        # Return the streaming response
-        return StreamingResponse(event_generator(), media_type="text/event-stream")
+            # Return the streaming response
+            return StreamingResponse(
+                event_generator(), media_type="text/event-stream", background=background
+            )
+        else:
+            raise Exception(str(res))
     else:
         res = await event_caller(
             {
@@ -139,8 +150,6 @@ async def generate_direct_chat_completion(
             }
         )
 
-        print(res)
-
         if "error" in res:
             raise Exception(res["error"])
 

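The backend half of this change reorders the streaming path in generate_direct_chat_completion: the socket listener for the reply channel is registered up front, the completion request goes through event_caller (which waits for the client's acknowledgement) instead of the fire-and-forget event_emitter, and the StreamingResponse is only built once the client answers with a truthy "status". Listener cleanup also moves out of the generator's finally block into the response's background callback, so it still runs when the client never consumes the stream. Below is a minimal, self-contained sketch of that pattern; sio is a bare python-socketio AsyncServer and request_completion is a stand-in for the real event_caller wiring, so the names are illustrative rather than Open WebUI's actual API.

import asyncio
import json

import socketio  # assumption: python-socketio AsyncServer, as in the real socket layer
from starlette.background import BackgroundTask
from starlette.responses import StreamingResponse

sio = socketio.AsyncServer(async_mode="asgi")

async def stream_direct_completion(channel: str, request_completion) -> StreamingResponse:
    # Queue that the socket listener fills and the SSE generator drains.
    q: asyncio.Queue = asyncio.Queue()

    async def message_listener(sid, data):
        # Every message the client emits on this channel becomes a queue item.
        await q.put(data)

    # Register the listener before asking the client to start, so no chunk is lost.
    sio.on(channel, message_listener)

    # Ask the client to run the completion; it acknowledges with {"status": True}
    # once it has successfully opened the upstream response.
    res = await request_completion(channel)
    if not res.get("status", False):
        raise Exception(str(res))

    async def event_generator():
        while True:
            data = await q.get()
            if isinstance(data, dict):
                if data.get("done"):
                    break  # client signals end of stream
                yield f"data: {json.dumps(data)}\n\n"
            elif isinstance(data, str):
                yield data

    def remove_listener():
        # Runs after the response finishes (or the client disconnects),
        # replacing the cleanup that used to live in the generator's finally block.
        sio.handlers["/"].pop(channel, None)

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        background=BackgroundTask(remove_listener),
    )

The commit itself passes its async background function directly as the background argument, which Starlette also awaits after the response completes; wrapping the cleanup in BackgroundTask is just the explicitly typed form of the same idea.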
src/routes/+layout.svelte  (+12, -20)

@@ -279,8 +279,17 @@
 								OPENAI_API_URL
 							);
 
-							if (res && res.ok) {
+							if (res) {
+								// raise if the response is not ok
+								if (!res.ok) {
+									throw await res.json();
+								}
+
 								if (form_data?.stream ?? false) {
+									cb({
+										status: true
+									});
+
 									// res will either be SSE or JSON
 									const reader = res.body.getReader();
 									const decoder = new TextDecoder();
@@ -316,29 +325,12 @@
 							}
 						} catch (error) {
 							console.error('chatCompletion', error);
-
-							if (form_data?.stream ?? false) {
-								$socket.emit(channel, {
-									error: error
-								});
-							} else {
-								cb({
-									error: error
-								});
-							}
+							cb(error);
 						}
 					}
 				} catch (error) {
 					console.error('chatCompletion', error);
-					if (form_data?.stream ?? false) {
-						$socket.emit(channel, {
-							error: error
-						});
-					} else {
-						cb({
-							error: error
-						});
-					}
+					cb(error);
 				} finally {
 					$socket.emit(channel, {
 						done: true
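
The +layout.svelte change is the client half of that handshake. Instead of routing failures back over the socket as error events, the layout now throws as soon as the fetch response is not ok, acknowledges a successful stream start with cb({ status: true }), and passes any caught error straight to cb; the finally block still emits done: true on the channel so the backend generator terminates. Because cb is the socket.io acknowledgement callback, whatever the client hands it becomes the value that the server-side event_caller resolves with, which is what the new res.get("status", False) check in chat.py consumes. The sketch below shows that server-side call under the assumption that event_caller is built on python-socketio's sio.call; the event name and timeout are placeholders, not Open WebUI's actual values.

import socketio  # assumption: same python-socketio AsyncServer as in the previous sketch

sio = socketio.AsyncServer(async_mode="asgi")

async def event_caller(event_data: dict, session_id: str) -> dict:
    # sio.call() emits the event and waits for the client to invoke its
    # acknowledgement callback (the cb used in +layout.svelte).
    return await sio.call(
        "chat-events",   # placeholder event name
        event_data,      # e.g. {"type": "request:chat:completion", "data": {...}}
        to=session_id,
        timeout=60,
    )

On success the layout calls cb({ status: true }), so event_caller returns {"status": True} and the backend builds the StreamingResponse; on any failure it calls cb(error), and the backend raises Exception(str(res)) instead of starting a stream the client will never feed.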