소스 검색

Python Formatting (Failed CI - fixed)

DmitriyAlergant-T1A 5 달 전
부모
커밋
374d6cad18
2개의 변경된 파일에서 36개의 추가와 21개의 삭제
  1. 8 4
      backend/open_webui/apps/webui/main.py
  2. 28 17
      backend/open_webui/main.py

+ 8 - 4
backend/open_webui/apps/webui/main.py

@@ -272,7 +272,9 @@ async def get_pipe_models():
                 log.exception(e)
                 sub_pipes = []
 
-            log.debug(f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}")
+            log.debug(
+                f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}"
+            )
 
             for p in sub_pipes:
                 sub_pipe_id = f'{pipe.id}.{p["id"]}'
@@ -282,7 +284,7 @@ async def get_pipe_models():
                     sub_pipe_name = f"{function_module.name}{sub_pipe_name}"
 
                 pipe_flag = {"type": pipe.type}
-                
+
                 pipe_models.append(
                     {
                         "id": sub_pipe_id,
@@ -296,8 +298,10 @@ async def get_pipe_models():
         else:
             pipe_flag = {"type": "pipe"}
 
-            log.debug(f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}")
-            
+            log.debug(
+                f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
+            )
+
             pipe_models.append(
                 {
                     "id": pipe.id,

+ 28 - 17
backend/open_webui/main.py

@@ -539,7 +539,6 @@ async def chat_completion_files_handler(
         if len(queries) == 0:
             queries = [get_last_user_message(body["messages"])]
 
-
         sources = get_sources_from_files(
             files=files,
             queries=queries,
@@ -969,7 +968,7 @@ app.add_middleware(SecurityHeadersMiddleware)
 @app.middleware("http")
 async def commit_session_after_request(request: Request, call_next):
     response = await call_next(request)
-    #log.debug("Commit session after request")
+    # log.debug("Commit session after request")
     Session.commit()
     return response
 
@@ -1215,7 +1214,9 @@ async def get_models(user=Depends(get_verified_user)):
                     filtered_models.append(model)
         models = filtered_models
 
-    log.debug(f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}")
+    log.debug(
+        f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}"
+    )
 
     return {"data": models}
 
@@ -1727,7 +1728,9 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)):
         models,
     )
 
-    log.debug(f"generating chat title using model {task_model_id} for user {user.email} ")
+    log.debug(
+        f"generating chat title using model {task_model_id} for user {user.email} "
+    )
 
     if app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE != "":
         template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE
@@ -1767,10 +1770,10 @@ Artificial Intelligence in Healthcare
             }
         ),
         "metadata": {
-                "task": str(TASKS.TITLE_GENERATION), 
-                "task_body": form_data,
-                "chat_id": form_data.get("chat_id", None)
-                },
+            "task": str(TASKS.TITLE_GENERATION),
+            "task_body": form_data,
+            "chat_id": form_data.get("chat_id", None),
+        },
     }
 
     # Handle pipeline filters
@@ -1820,8 +1823,10 @@ async def generate_chat_tags(form_data: dict, user=Depends(get_verified_user)):
         app.state.config.TASK_MODEL_EXTERNAL,
         models,
     )
-    
-    log.debug(f"generating chat tags using model {task_model_id} for user {user.email} ")
+
+    log.debug(
+        f"generating chat tags using model {task_model_id} for user {user.email} "
+    )
 
     if app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE != "":
         template = app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE
@@ -1853,10 +1858,10 @@ JSON format: { "tags": ["tag1", "tag2", "tag3"] }
         "messages": [{"role": "user", "content": content}],
         "stream": False,
         "metadata": {
-            "task": str(TASKS.TAGS_GENERATION), 
+            "task": str(TASKS.TAGS_GENERATION),
             "task_body": form_data,
-            "chat_id": form_data.get("chat_id", None)
-            }
+            "chat_id": form_data.get("chat_id", None),
+        },
     }
 
     # Handle pipeline filters
@@ -1914,8 +1919,10 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
         app.state.config.TASK_MODEL_EXTERNAL,
         models,
     )
-    
-    log.debug(f"generating {type} queries using model {task_model_id} for user {user.email}")
+
+    log.debug(
+        f"generating {type} queries using model {task_model_id} for user {user.email}"
+    )
 
     if app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE != "":
         template = app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE
@@ -1930,7 +1937,11 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
         "model": task_model_id,
         "messages": [{"role": "user", "content": content}],
         "stream": False,
-        "metadata": {"task": str(TASKS.QUERY_GENERATION), "task_body": form_data, "chat_id": form_data.get("chat_id", None)},
+        "metadata": {
+            "task": str(TASKS.QUERY_GENERATION),
+            "task_body": form_data,
+            "chat_id": form_data.get("chat_id", None),
+        },
     }
 
     # Handle pipeline filters
@@ -2047,7 +2058,7 @@ async def generate_moa_response(form_data: dict, user=Depends(get_verified_user)
         app.state.config.TASK_MODEL_EXTERNAL,
         models,
     )
-   
+
     log.debug(f"generating MOA model {task_model_id} for user {user.email} ")
 
     template = """You have been provided with a set of responses from various models to the latest user query: "{{prompt}}"