// ollama/api/types.go

package api
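
// PullRequest names the model to download.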
type PullRequest struct {
	Model string `json:"model"`
}
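
// PullProgress reports download progress for a pull; an illustrative
// wire form is {"total":100,"completed":50,"percent":50}.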
type PullProgress struct {
	Total     int64   `json:"total"`
	Completed int64   `json:"completed"`
	Percent   float64 `json:"percent"`
}
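
// GenerateRequest asks a model to complete a prompt. Both option
// structs are optional; when nil they are omitted from the JSON body.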
type GenerateRequest struct {
	Model          string          `json:"model"`
	Prompt         string          `json:"prompt"`
	ModelOptions   *ModelOptions   `json:"model_opts,omitempty"`
	PredictOptions *PredictOptions `json:"predict_opts,omitempty"`
}
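
// ModelOptions controls how a model is loaded; the fields appear to
// mirror llama.cpp load-time settings (context size, mmap/mlock,
// GPU layer offload, tensor split, and so on).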
type ModelOptions struct {
	ContextSize int    `json:"context_size,omitempty"`
	Seed        int    `json:"seed,omitempty"`
	NBatch      int    `json:"n_batch,omitempty"`
	F16Memory   bool   `json:"memory_f16,omitempty"`
	MLock       bool   `json:"mlock,omitempty"`
	MMap        bool   `json:"mmap,omitempty"`
	VocabOnly   bool   `json:"vocab_only,omitempty"`
	LowVRAM     bool   `json:"low_vram,omitempty"`
	Embeddings  bool   `json:"embeddings,omitempty"`
	NUMA        bool   `json:"numa,omitempty"`
	NGPULayers  int    `json:"gpu_layers,omitempty"`
	MainGPU     string `json:"main_gpu,omitempty"`
	TensorSplit string `json:"tensor_split,omitempty"`
}
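
// PredictOptions controls sampling for a single generation: token
// limits, temperature, top-k/top-p, repetition and mirostat penalties,
// and prompt-cache behaviour.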
type PredictOptions struct {
	Seed        int     `json:"seed,omitempty"`
	Threads     int     `json:"threads,omitempty"`
	Tokens      int     `json:"tokens,omitempty"`
	TopK        int     `json:"top_k,omitempty"`
	Repeat      int     `json:"repeat,omitempty"`
	Batch       int     `json:"batch,omitempty"`
	NKeep       int     `json:"nkeep,omitempty"`
	TopP        float64 `json:"top_p,omitempty"`
	Temperature float64 `json:"temp,omitempty"`
	Penalty     float64 `json:"penalty,omitempty"`

	// The untagged fields in this struct marshal under their exported
	// Go names (e.g. "F16KV", "StopPrompts") rather than snake_case
	// keys, and are never omitted when empty.
	F16KV       bool
	DebugMode   bool
	StopPrompts []string

	IgnoreEOS bool `json:"ignore_eos,omitempty"`

	TailFreeSamplingZ float64 `json:"tfs_z,omitempty"`
	TypicalP          float64 `json:"typical_p,omitempty"`
	FrequencyPenalty  float64 `json:"freq_penalty,omitempty"`
	PresencePenalty   float64 `json:"pres_penalty,omitempty"`
	Mirostat          int     `json:"mirostat,omitempty"`
	MirostatETA       float64 `json:"mirostat_lr,omitempty"`
	MirostatTAU       float64 `json:"mirostat_ent,omitempty"`
	PenalizeNL        bool    `json:"penalize_nl,omitempty"`
	LogitBias         string  `json:"logit_bias,omitempty"`

	PathPromptCache string
	MLock           bool   `json:"mlock,omitempty"`
	MMap            bool   `json:"mmap,omitempty"`
	PromptCacheAll  bool
	PromptCacheRO   bool
	MainGPU         string
	TensorSplit     string
}
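
// DefaultModelOptions supplies baseline load settings.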
var DefaultModelOptions ModelOptions = ModelOptions{
	ContextSize: 512,
	Seed:        0,
	F16Memory:   true,
	MLock:       false,
	Embeddings:  true,
	MMap:        true,
	LowVRAM:     false,
}
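
// DefaultPredictOptions supplies baseline sampling settings.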
var DefaultPredictOptions PredictOptions = PredictOptions{
	Seed:              -1,
	Threads:           -1,
	Tokens:            512,
	Penalty:           1.1,
	Repeat:            64,
	Batch:             512,
	NKeep:             64,
	TopK:              90,
	TopP:              0.86,
	TailFreeSamplingZ: 1.0,
	TypicalP:          1.0,
	Temperature:       0.8,
	FrequencyPenalty:  0.0,
	PresencePenalty:   0.0,
	Mirostat:          0,
	MirostatTAU:       5.0,
	MirostatETA:       0.1,
	MMap:              true,
	StopPrompts:       []string{"llama"},
}
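
// GenerateResponse carries the text produced for a GenerateRequest.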
type GenerateResponse struct {
	Response string `json:"response"`
}
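
// Usage sketch (not part of the original file): build a GenerateRequest
// from the package defaults and marshal it to its JSON wire form. The
// model name and prompt are placeholders, and the encoding/json import
// is assumed on the caller's side; copying the defaults first avoids
// mutating the package-level variables.
//
//	model := DefaultModelOptions
//	predict := DefaultPredictOptions
//	predict.Temperature = 0.2 // per-request override
//	req := GenerateRequest{
//		Model:          "llama2",
//		Prompt:         "Why is the sky blue?",
//		ModelOptions:   &model,
//		PredictOptions: &predict,
//	}
//	body, err := json.Marshal(req)
//	if err != nil {
//		// handle the error
//	}
//	// body now holds {"model":"llama2","prompt":"...","model_opts":{...},"predict_opts":{...}}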