Browse Source

error

jmorganca/ggml-static
Josh Yan 2 years ago
parent
commit
33a65e3ba3
  1. 3
      llm/server.go
  2. 1
      llm/status.go

3
llm/server.go

@@ -560,6 +560,9 @@ func (s *llmServer) WaitUntilRunning(ctx context.Context) error {
if s.status != nil && s.status.LastErrMsg != "" {
msg = s.status.LastErrMsg
}
if strings.Contains(msg, "unknown model") {
return fmt.Errorf("this model is not supported by your version of Ollama. You may need to upgrade")
}
return fmt.Errorf("llama runner process has terminated: %v %s", err, msg)
default:
}

1
llm/status.go

@@ -25,6 +25,7 @@ var errorPrefixes = []string{
"CUDA error",
"cudaMalloc failed",
"\"ERR\"",
"architecture",
}
func (w *StatusWriter) Write(b []byte) (int, error) {

Loading…
Cancel
Save