Merge pull request #5421 from ollama/jyan/ver

fix: add unsupported architecture message for linux/windows
Josh authored 2024-07-01 16:32:14 -07:00, committed by GitHub
commit 0403e9860e
2 changed files with 4 additions and 0 deletions


@@ -560,6 +560,9 @@ func (s *llmServer) WaitUntilRunning(ctx context.Context) error {
 			if s.status != nil && s.status.LastErrMsg != "" {
 				msg = s.status.LastErrMsg
 			}
+			if strings.Contains(msg, "unknown model") {
+				return fmt.Errorf("this model is not supported by your version of Ollama. You may need to upgrade")
+			}
 			return fmt.Errorf("llama runner process has terminated: %v %s", err, msg)
 		default:
 		}

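Taken together, the two hunks in this commit make WaitUntilRunning translate an "unknown model" failure from the runner into an upgrade hint rather than surfacing the raw process error. A minimal standalone sketch of that translation pattern follows; `classifyRunnerError` and the sample message are illustrative names for this sketch, not part of the Ollama codebase.

```go
package main

import (
	"errors"
	"fmt"
	"strings"
)

// classifyRunnerError mirrors the pattern in the hunk above: when the
// runner's last error message mentions an unknown model, return an
// upgrade hint; otherwise wrap the raw termination error.
// (Illustrative helper, not part of the Ollama codebase.)
func classifyRunnerError(err error, lastErrMsg string) error {
	if strings.Contains(lastErrMsg, "unknown model") {
		return errors.New("this model is not supported by your version of Ollama. You may need to upgrade")
	}
	return fmt.Errorf("llama runner process has terminated: %v %s", err, lastErrMsg)
}

func main() {
	err := classifyRunnerError(
		errors.New("exit status 1"),
		"error loading model: unknown model architecture: 'example-arch'",
	)
	fmt.Println(err) // prints the upgrade hint instead of the raw runner output
}
```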

@@ -25,6 +25,7 @@ var errorPrefixes = []string{
 	"CUDA error",
 	"cudaMalloc failed",
 	"\"ERR\"",
+	"architecture",
 }
 
 func (w *StatusWriter) Write(b []byte) (int, error) {
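The errorPrefixes list feeds the StatusWriter, which watches the runner's output for lines that look like errors and keeps the most recent match for later reporting; adding "architecture" lets messages such as "unknown model architecture: ..." be captured, which is what the check in the first hunk then inspects. The Write body and the full prefix list are not shown in this diff, so the following is only a sketch under those assumptions: the field name LastErrMsg comes from the first hunk, the list is abridged to the entries visible above, and whether the real code matches on prefix or substring is not visible here (the sketch uses strings.Contains).

```go
package main

import (
	"fmt"
	"strings"
)

// Prefixes that mark a line of runner output as an error.
// Abridged to the entries visible in the diff; the real list has more above them.
var errorPrefixes = []string{
	"CUDA error",
	"cudaMalloc failed",
	"\"ERR\"",
	"architecture", // added by this commit; matches "unknown model architecture: ..."
}

// StatusWriter here is a simplified stand-in: only the error-capture idea is
// sketched, and any pass-through of output to its real destination is omitted.
type StatusWriter struct {
	LastErrMsg string
}

func (w *StatusWriter) Write(b []byte) (int, error) {
	for _, line := range strings.Split(string(b), "\n") {
		for _, prefix := range errorPrefixes {
			if strings.Contains(line, prefix) {
				// Remember the most recent error-looking line so the caller
				// (e.g. WaitUntilRunning) can surface it when the process exits.
				w.LastErrMsg = strings.TrimSpace(line)
			}
		}
	}
	return len(b), nil
}

func main() {
	w := &StatusWriter{}
	fmt.Fprintln(w, "llama_model_load: error loading model: unknown model architecture: 'example-arch'")
	fmt.Println(w.LastErrMsg) // the captured error line
}
```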