fix empty response: skip emitting responses and writing to context when prediction content is empty

This commit is contained in:
Michael Yang 2023-09-05 15:03:24 -07:00
parent d1c2558f7e
commit 2bc06565c7

View file

@ -497,8 +497,10 @@ func (llm *llama) Predict(ctx context.Context, prevContext []int, prompt string,
return fmt.Errorf("error unmarshaling llm prediction response: %v", err)
}
fn(api.GenerateResponse{Response: p.Content})
nextContext.WriteString(p.Content)
if p.Content != "" {
fn(api.GenerateResponse{Response: p.Content})
nextContext.WriteString(p.Content)
}
if p.Stop {
embd, err := llm.Encode(ctx, nextContext.String())