
fix: error handling

Timothy J. Baek 1 year ago
Parent
Commit
be3ab88c88
2 changed files with 14 additions and 4 deletions
  1. backend/apps/ollama/main.py (+13 -4)
  2. backend/constants.py (+1 -0)

+ 13 - 4
backend/apps/ollama/main.py

@@ -123,6 +123,7 @@ async def get_all_models():
             map(lambda response: response["models"], responses)
         )
     }
+
     app.state.MODELS = {model["model"]: model for model in models["models"]}
 
     return models
@@ -181,11 +182,19 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
         responses = await asyncio.gather(*tasks)
         responses = list(filter(lambda x: x is not None, responses))
 
-        lowest_version = min(
-            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
-        )
+        print(responses)
+
+        if len(responses) > 0:
+            lowest_version = min(
+                responses, key=lambda x: tuple(map(int, x["version"].split(".")))
+            )
 
-        return {"version": lowest_version["version"]}
+            return {"version": lowest_version["version"]}
+        else:
+            raise HTTPException(
+                status_code=500,
+                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
+            )
     else:
         url = app.state.OLLAMA_BASE_URLS[url_idx]
         try:
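
For reference, the hunk above guards the lowest-version lookup: the endpoint reports the oldest Ollama version among all configured base URLs, and the new branch raises an HTTP 500 when no endpoint responds instead of letting min() fail on an empty sequence. A minimal standalone sketch of that comparison, with hypothetical response payloads:

    # Sketch of the version-selection logic above; the payloads are made up.
    responses = [
        {"version": "0.1.29"},
        {"version": "0.1.7"},
    ]

    if len(responses) > 0:
        # "0.1.7" -> (0, 1, 7); tuple comparison orders versions numerically,
        # so "0.1.7" sorts below "0.1.29" (plain string comparison would not).
        lowest_version = min(
            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
        )
        print(lowest_version["version"])  # 0.1.7
    else:
        # The endpoint now raises HTTP 500 with ERROR_MESSAGES.OLLAMA_NOT_FOUND here.
        raise RuntimeError("WebUI could not connect to Ollama")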

+ 1 - 0
backend/constants.py

@@ -52,3 +52,4 @@ class ERROR_MESSAGES(str, Enum):
 
     MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
     OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found"
+    OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"
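
The new constant slots into the existing ERROR_MESSAGES enum and is returned as the HTTPException detail above. A hedged sketch of how such a constant surfaces to a client, assuming a FastAPI app (the HTTPException signature in the diff suggests FastAPI; the /version-check route here is purely illustrative and not part of the commit):

    # Illustrative only: a hypothetical route showing how the constant reaches the client.
    from enum import Enum

    from fastapi import FastAPI, HTTPException


    class ERROR_MESSAGES(str, Enum):
        OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"


    app = FastAPI()


    @app.get("/version-check")  # hypothetical route for illustration
    async def version_check():
        responses = []  # pretend no Ollama endpoint answered
        if len(responses) == 0:
            # FastAPI serializes the str-enum detail into
            # {"detail": "WebUI could not connect to Ollama"} in the 500 response.
            raise HTTPException(status_code=500, detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND)
        return {"version": responses[0]["version"]}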