diff --git a/llm/llm.go b/llm/llm.go
index f2a5e557..d24507cc 100644
--- a/llm/llm.go
+++ b/llm/llm.go
@@ -33,7 +33,7 @@ func Quantize(infile, outfile string, ftype fileType) error {
 	params.ftype = ftype.Value()
 
 	if rc := C.llama_model_quantize(cinfile, coutfile, &params); rc != 0 {
-		return fmt.Errorf("llama_model_quantize: %d", rc)
+		return fmt.Errorf("failed to quantize model. This model architecture may not be supported, or you may need to upgrade Ollama to the latest version")
 	}
 
 	return nil