Browse Source

fix model lookup directory for other routes

Jeffrey Morgan 1 year ago
parent
commit
23c645388c
1 changed file with 4 additions and 4 deletions
  1. 4 4
      proto.py

+ 4 - 4
proto.py

@@ -18,11 +18,11 @@ def load():
 
 
     if not model:
         return Response("Model is required", status=400)
-    if not os.path.exists(f"../models/{model}.bin"):
+    if not os.path.exists(f"./models/{model}.bin"):
         return {"error": "The model does not exist."}, 400
 
     if model not in llms:
-        llms[model] = Llama(model_path=f"../models/{model}.bin")
+        llms[model] = Llama(model_path=f"./models/{model}.bin")
 
     return Response(status=204)
 
 
@@ -34,7 +34,7 @@ def unload():
 
 
     if not model:
         return Response("Model is required", status=400)
-    if not os.path.exists(f"../models/{model}.bin"):
+    if not os.path.exists(f"./models/{model}.bin"):
         return {"error": "The model does not exist."}, 400
 
     llms.pop(model, None)
@@ -57,7 +57,7 @@ def generate():
 
 
     if model not in llms:
         # auto load
-        llms[model] = Llama(model_path=f"../models/{model}.bin")
+        llms[model] = Llama(model_path=f"./models/{model}.bin")
 
     def stream_response():
         stream = llms[model](