Add Cache flag to api (#1642)
This commit is contained in:
parent
db356c8519
commit
10da41d677
3 changed files with 6 additions and 2 deletions
|
@@ -103,6 +103,7 @@ type Options struct {
 	MirostatEta     float32  `json:"mirostat_eta,omitempty"`
 	PenalizeNewline bool     `json:"penalize_newline,omitempty"`
 	Stop            []string `json:"stop,omitempty"`
+	Cache           bool     `json:"cache,omitempty"`
 }

 // Runner options which must be set when the model is loaded into memory
|
@@ -355,6 +356,7 @@ func DefaultOptions() Options {
 		MirostatEta:     0.1,
 		PenalizeNewline: true,
 		Seed:            -1,
+		Cache:           true,

 		Runner: Runner{
 			// options set when the model is loaded
|
@@ -306,7 +306,8 @@ curl http://localhost:11434/api/generate -d '{
     "embedding_only": false,
     "rope_frequency_base": 1.1,
     "rope_frequency_scale": 0.8,
-    "num_thread": 8
+    "num_thread": 8,
+    "cache": true
   }
 }'
 ```
|
@@ -235,6 +235,7 @@ func predict(llm extServer, opts api.Options, ctx context.Context, predict Predi
 		"seed":         opts.Seed,
 		"stop":         opts.Stop,
 		"image_data":   imageData,
+		"cache_prompt": opts.Cache,
 	}

 	if predict.Format == "json" {
Loading…
Reference in a new issue