enable cache_prompt by default

parent c5f21f73a4
commit d4ebdadbe7

2 changed files with 2 additions and 3 deletions
@@ -103,7 +103,6 @@ type Options struct {
 	MirostatEta     float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
 	Stop            []string `json:"stop,omitempty"`
-	Cache           bool     `json:"cache,omitempty"`
 }

 // Runner options which must be set when the model is loaded into memory
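The first hunk removes the user-facing cache toggle from the public options struct. A minimal sketch of what remains, assuming only the neighboring fields visible in the diff context (the package and struct here are trimmed down for illustration, not the repo's full definition):

```go
package main

import "fmt"

// Trimmed-down sketch of the Options struct after this commit: the
// Cache field is gone, so callers can no longer toggle prompt caching
// per request. Only the fields shown in the diff context are included.
type Options struct {
	MirostatEta     float32  `json:"mirostat_eta,omitempty"`
	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
	Stop            []string `json:"stop,omitempty"`
}

func main() {
	// Options{Cache: true} would no longer compile after this change.
	opts := Options{Stop: []string{"###"}}
	fmt.Printf("%+v\n", opts)
}
```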
@@ -235,7 +235,7 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"seed":         opts.Seed,
 		"stop":         opts.Stop,
 		"image_data":   imageData,
-		"cache_prompt": opts.Cache,
+		"cache_prompt": true,
 	}

 	if predict.Format == "json" {
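In the second hunk, the runner now hardcodes `cache_prompt`, the llama.cpp server option that reuses the KV cache for a shared prompt prefix, instead of passing the removed `opts.Cache` through. A hedged sketch of the request body that `predict` now builds; only fields visible in the diff are included, and the concrete values are made up for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Sketch of the JSON body sent to the llama.cpp server after this
	// commit: "cache_prompt" is now always true rather than being
	// driven by the removed opts.Cache. The other values are examples.
	request := map[string]any{
		"seed":         42,
		"stop":         []string{"###"},
		"cache_prompt": true,
	}

	b, err := json.Marshal(request)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```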