From 55ab5e60dbb09289f0dea7fdcc29ef4d6e24914c Mon Sep 17 00:00:00 2001
From: Bruce MacDonald
Date: Thu, 29 Jun 2023 15:53:13 -0400
Subject: [PATCH] return an error when the model can't be loaded

---
 ollama/engine.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/ollama/engine.py b/ollama/engine.py
index 39a60da9..b45dcd6d 100644
--- a/ollama/engine.py
+++ b/ollama/engine.py
@@ -45,11 +45,14 @@ def load(model, models_home=".", llms={}):
         # try loading this as a path to a model, rather than a model name
         model_path = os.path.abspath(model)
 
-        # suppress LLM's output
-        with suppress_stderr():
-            llm = LLM(model_path, verbose=False)
-        llms.update({model: llm})
-
+        try:
+            # suppress LLM's output
+            with suppress_stderr():
+                llm = LLM(model_path, verbose=False)
+            llms.update({model: llm})
+        except Exception as e:
+            # stderr is suppressed, so raise a readable error; chain the
+            # original cause with `from e` to keep the traceback for debugging
+            raise Exception(f"Failed to load model: {model}") from e
     return llm
 
 