enable cache_prompt by default

This commit is contained in:
Jeffrey Morgan 2023-12-27 14:23:42 -05:00
parent c5f21f73a4
commit d4ebdadbe7
2 changed files with 2 additions and 3 deletions

@@ -103,7 +103,6 @@ type Options struct {
 	MirostatEta     float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
 	Stop            []string `json:"stop,omitempty"`
-	Cache           bool     `json:"cache,omitempty"`
 }

 // Runner options which must be set when the model is loaded into memory

@@ -235,7 +235,7 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"seed":         opts.Seed,
 		"stop":         opts.Stop,
 		"image_data":   imageData,
-		"cache_prompt": opts.Cache,
+		"cache_prompt": true,
 	}

 	if predict.Format == "json" {
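
For reference, a minimal sketch (not the repository's exact code) of the request body that predict sends to the llama.cpp-based server after this change; the request variable name and any keys not shown in the diff above are assumptions:

	// Sketch: the completion request now always asks the server to reuse
	// the cached prompt; the per-request Cache option is gone.
	request := map[string]interface{}{
		"prompt":       predict.Prompt, // assumed field on PredictOpts
		"seed":         opts.Seed,
		"stop":         opts.Stop,
		"image_data":   imageData,
		"cache_prompt": true, // previously opts.Cache; now hard-coded on
	}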