fix model lookup directory for other routes

Jeffrey Morgan 2023-06-25 19:38:35 -04:00
parent 2db9c1bbfc
commit 23c645388c


@@ -18,11 +18,11 @@ def load():
     if not model:
         return Response("Model is required", status=400)
-    if not os.path.exists(f"../models/{model}.bin"):
+    if not os.path.exists(f"./models/{model}.bin"):
         return {"error": "The model does not exist."}, 400
     if model not in llms:
-        llms[model] = Llama(model_path=f"../models/{model}.bin")
+        llms[model] = Llama(model_path=f"./models/{model}.bin")
     return Response(status=204)
@@ -34,7 +34,7 @@ def unload():
     if not model:
         return Response("Model is required", status=400)
-    if not os.path.exists(f"../models/{model}.bin"):
+    if not os.path.exists(f"./models/{model}.bin"):
         return {"error": "The model does not exist."}, 400
     llms.pop(model, None)
@@ -57,7 +57,7 @@ def generate():
     if model not in llms:
         # auto load
-        llms[model] = Llama(model_path=f"../models/{model}.bin")
+        llms[model] = Llama(model_path=f"./models/{model}.bin")
     def stream_response():
         stream = llms[model](
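
The change is mechanical: every route that looked up f"../models/{model}.bin" now uses f"./models/{model}.bin", so model files resolve relative to the process working directory rather than its parent. A sketch of what one of these routes could look like if the lookup were anchored to the server file instead of the working directory — the route decorator, request parsing, and model_path helper below are illustrative assumptions, not part of this commit:

import os

from flask import Flask, Response, request
from llama_cpp import Llama

app = Flask(__name__)
llms = {}

# Hypothetical: anchor the models directory to this file's location so the
# lookup works no matter where the server process is launched from.
MODELS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "models")

def model_path(model):
    # Mirrors the f"./models/{model}.bin" pattern in the diff, but absolute.
    return os.path.join(MODELS_DIR, f"{model}.bin")

@app.route("/load", methods=["POST"])
def load():
    model = request.json.get("model")
    if not model:
        return Response("Model is required", status=400)
    if not os.path.exists(model_path(model)):
        return {"error": "The model does not exist."}, 400
    if model not in llms:
        llms[model] = Llama(model_path=model_path(model))
    return Response(status=204)

A relative "./models" path still breaks if the server is started from outside the project directory; resolving against __file__ removes that dependence entirely.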