Browse Source

fix: temperature not passed correctly

Christoph Holthaus 11 months ago
parent
commit
97b39115a1
2 changed files with 3 additions and 3 deletions
  1. backend/apps/ollama/main.py (+1 −1)
  2. backend/apps/openai/main.py (+2 −2)

+ 1 - 1
backend/apps/ollama/main.py

@@ -764,7 +764,7 @@ async def generate_chat_completion(
                     "frequency_penalty", None
                 )
 
 
-            if model_info.params.get("temperature", None):
+            if model_info.params.get("temperature", None) is not None:
                 payload["options"]["temperature"] = model_info.params.get(
                     "temperature", None
                 )

+ 2 - 2
backend/apps/openai/main.py

@@ -373,8 +373,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
                 model_info.params = model_info.params.model_dump()
 
 
                 if model_info.params:
-                    if model_info.params.get("temperature", None):
-                        payload["temperature"] = int(
+                    if model_info.params.get("temperature", None) is not None:
+                        payload["temperature"] = float(
                             model_info.params.get("temperature")
                         )