fix empty response
parent d1c2558f7e
commit 2bc06565c7
1 changed file with 4 additions and 2 deletions
@@ -497,8 +497,10 @@ func (llm *llama) Predict(ctx context.Context, prevContext []int, prompt string,
 				return fmt.Errorf("error unmarshaling llm prediction response: %v", err)
 			}
 
-			fn(api.GenerateResponse{Response: p.Content})
-			nextContext.WriteString(p.Content)
+			if p.Content != "" {
+				fn(api.GenerateResponse{Response: p.Content})
+				nextContext.WriteString(p.Content)
+			}
 
 			if p.Stop {
 				embd, err := llm.Encode(ctx, nextContext.String())
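For context, a minimal, self-contained sketch of the streaming pattern this hunk sits in. The prediction and generateResponse types below are simplified stand-ins for the streamed prediction struct and api.GenerateResponse, not the actual Ollama code; only the Content and Stop fields from the hunk are assumed. The point of the change is the guard: chunks with empty Content are skipped instead of being forwarded to the callback as empty responses.

package main

import (
	"fmt"
	"strings"
)

// prediction mirrors the two fields of the streamed response used above.
type prediction struct {
	Content string
	Stop    bool
}

// generateResponse is a simplified stand-in for api.GenerateResponse.
type generateResponse struct {
	Response string
}

// stream forwards non-empty chunks to fn and accumulates them for the next
// context, mirroring the guard added in this commit.
func stream(chunks []prediction, fn func(generateResponse)) string {
	var nextContext strings.Builder
	for _, p := range chunks {
		if p.Content != "" {
			fn(generateResponse{Response: p.Content})
			nextContext.WriteString(p.Content)
		}
		if p.Stop {
			break
		}
	}
	return nextContext.String()
}

func main() {
	chunks := []prediction{
		{Content: "Hello"},
		{Content: ""}, // previously produced an empty response
		{Content: ", world"},
		{Stop: true},
	}
	out := stream(chunks, func(r generateResponse) {
		fmt.Printf("chunk: %q\n", r.Response)
	})
	fmt.Println("context:", out)
}

Note that writing an empty string to the builder is already a no-op, so the guard does not change the accumulated context; its only effect is that the callback no longer receives empty GenerateResponse values.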