Extend api/show and ollama show to return more model info (#4881)
* API Show Extended
* Initial Draft of Information (Co-Authored-By: Patrick Devine <pdevine@sonic.net>)
* Clean Up
* Descriptive arg error messages and other fixes
* Second Draft of Show with Projectors Included
* Remove Chat Template
* Touches
* Prevent wrapping from files
* Verbose functionality
* Docs
* Address Feedback
* Lint
* Resolve Conflicts
* Function Name
* Tests for api/show model info
* Show Test File
* Add Projector Test
* Clean routes
* Projector Check
* Move Show Test
* Touches
* Doc update

Co-authored-by: Patrick Devine <pdevine@sonic.net>
parent 97c59be653
commit fedf71635e

5 changed files with 243 additions and 30 deletions
@@ -253,6 +253,7 @@ type ShowRequest struct {
	Model    string `json:"model"`
	System   string `json:"system"`
	Template string `json:"template"`
	Verbose  bool   `json:"verbose"`

	Options map[string]interface{} `json:"options"`
@@ -269,6 +270,8 @@ type ShowResponse struct {
	System        string         `json:"system,omitempty"`
	Details       ModelDetails   `json:"details,omitempty"`
	Messages      []Message      `json:"messages,omitempty"`
	ModelInfo     map[string]any `json:"model_info,omitempty"`
	ProjectorInfo map[string]any `json:"projector_info,omitempty"`
	ModifiedAt    time.Time      `json:"modified_at,omitempty"`
}
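For orientation, here is a minimal sketch (not part of the diff) of how a caller might exercise the new fields through the Go client. It assumes `api.ClientFromEnvironment` from the ollama `api` package and uses "llama3" as a placeholder model name:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	// Assumes OLLAMA_HOST or the default localhost:11434.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Verbose asks the server to return full data for large fields such as
	// tokenizer.ggml.tokens; without it they come back as empty arrays.
	resp, err := client.Show(context.Background(), &api.ShowRequest{
		Name:    "llama3", // placeholder model name
		Verbose: true,
	})
	if err != nil {
		log.Fatal(err)
	}

	// ModelInfo is a map[string]any keyed by GGUF metadata names; numeric
	// values decode as float64.
	fmt.Println(resp.ModelInfo["general.architecture"])
	fmt.Println(resp.ProjectorInfo) // nil unless the model includes a projector
}
```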
cmd/cmd.go (121 changed lines)
@@ -579,10 +579,6 @@ func ShowHandler(cmd *cobra.Command, args []string) error {
		return err
	}

	if len(args) != 1 {
		return errors.New("missing model name")
	}

	license, errLicense := cmd.Flags().GetBool("license")
	modelfile, errModelfile := cmd.Flags().GetBool("modelfile")
	parameters, errParams := cmd.Flags().GetBool("parameters")
@@ -625,10 +621,9 @@ func ShowHandler(cmd *cobra.Command, args []string) error {

	if flagsSet > 1 {
		return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified")
	} else if flagsSet == 0 {
		return errors.New("one of '--license', '--modelfile', '--parameters', '--system', or '--template' must be specified")
	}

	if flagsSet == 1 {
		req := api.ShowRequest{Name: args[0]}
		resp, err := client.Show(cmd.Context(), &req)
		if err != nil {
@@ -649,6 +644,120 @@ func ShowHandler(cmd *cobra.Command, args []string) error {
		}

		return nil
	}

	req := api.ShowRequest{Name: args[0]}
	resp, err := client.Show(cmd.Context(), &req)
	if err != nil {
		return err
	}

	arch := resp.ModelInfo["general.architecture"].(string)

	modelData := [][]string{
		{"arch", arch},
		{"parameters", resp.Details.ParameterSize},
		{"quantization", resp.Details.QuantizationLevel},
		{"context length", fmt.Sprintf("%v", resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)].(float64))},
		{"embedding length", fmt.Sprintf("%v", resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)].(float64))},
	}

	mainTableData := [][]string{
		{"Model"},
		{renderSubTable(modelData, false)},
	}

	if resp.ProjectorInfo != nil {
		projectorData := [][]string{
			{"arch", "clip"},
			{"parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))},
			{"projector type", resp.ProjectorInfo["clip.projector_type"].(string)},
			{"embedding length", fmt.Sprintf("%v", resp.ProjectorInfo["clip.vision.embedding_length"].(float64))},
			{"projection dimensionality", fmt.Sprintf("%v", resp.ProjectorInfo["clip.vision.projection_dim"].(float64))},
		}

		mainTableData = append(mainTableData,
			[]string{"Projector"},
			[]string{renderSubTable(projectorData, false)},
		)
	}

	if resp.Parameters != "" {
		mainTableData = append(mainTableData, []string{"Parameters"}, []string{formatParams(resp.Parameters)})
	}

	if resp.System != "" {
		mainTableData = append(mainTableData, []string{"System"}, []string{renderSubTable(twoLines(resp.System), true)})
	}

	if resp.License != "" {
		mainTableData = append(mainTableData, []string{"License"}, []string{renderSubTable(twoLines(resp.License), true)})
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetAutoWrapText(false)
	table.SetBorder(false)
	table.SetAlignment(tablewriter.ALIGN_LEFT)

	for _, v := range mainTableData {
		table.Append(v)
	}

	table.Render()

	return nil
}

func renderSubTable(data [][]string, file bool) string {
	var buf bytes.Buffer
	table := tablewriter.NewWriter(&buf)
	table.SetAutoWrapText(!file)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("\t")
	table.SetAlignment(tablewriter.ALIGN_LEFT)

	for _, v := range data {
		table.Append(v)
	}

	table.Render()

	renderedTable := buf.String()
	lines := strings.Split(renderedTable, "\n")
	for i, line := range lines {
		lines[i] = "\t" + line
	}

	return strings.Join(lines, "\n")
}

func twoLines(s string) [][]string {
	lines := strings.Split(s, "\n")
	res := [][]string{}

	count := 0
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if line != "" {
			count++
			res = append(res, []string{line})
			if count == 2 {
				return res
			}
		}
	}
	return res
}

func formatParams(s string) string {
	lines := strings.Split(s, "\n")
	table := [][]string{}

	for _, line := range lines {
		table = append(table, strings.Fields(line))
	}
	return renderSubTable(table, false)
}

func CopyHandler(cmd *cobra.Command, args []string) error {
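The CLI rendering above builds each section as its own table and embeds the rendered text as a single cell of an outer table. Below is a self-contained sketch of that nesting pattern, assuming `github.com/olekukonko/tablewriter` is the `tablewriter` package the handler calls into; it is an illustration of the design choice, not code from this change:

```go
package main

import (
	"bytes"
	"os"

	"github.com/olekukonko/tablewriter"
)

// renderInner renders key/value rows into a string so the result can be
// appended as one cell of an enclosing table -- the same trick renderSubTable uses.
func renderInner(rows [][]string) string {
	var buf bytes.Buffer
	t := tablewriter.NewWriter(&buf)
	t.SetBorder(false)
	t.SetAlignment(tablewriter.ALIGN_LEFT)
	for _, r := range rows {
		t.Append(r)
	}
	t.Render()
	return buf.String()
}

func main() {
	outer := tablewriter.NewWriter(os.Stdout)
	outer.SetAutoWrapText(false) // keep the embedded sub-table's line breaks intact
	outer.SetBorder(false)
	outer.SetAlignment(tablewriter.ALIGN_LEFT)

	outer.Append([]string{"Model"})
	outer.Append([]string{renderInner([][]string{
		{"arch", "llama"},
		{"parameters", "8.0B"},
		{"quantization", "Q4_0"},
	})})
	outer.Render()
}
```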
docs/api.md (37 changed lines)
@@ -777,11 +777,12 @@ A single JSON object will be returned.
POST /api/show
```

Show information about a model including details, modelfile, template, parameters, license, and system prompt.
Show information about a model including details, modelfile, template, parameters, license, system prompt.

### Parameters

- `name`: name of the model to show
- `verbose`: (optional) if set to `true`, returns full data for verbose response fields

### Examples
@@ -798,14 +799,40 @@ curl http://localhost:11434/api/show -d '{
```json
{
  "modelfile": "# Modelfile generated by \"ollama show\"\n# To build a new Modelfile based on this one, replace the FROM line with:\n# FROM llava:latest\n\nFROM /Users/matt/.ollama/models/blobs/sha256:200765e1283640ffbd013184bf496e261032fa75b99498a9613be4e94d63ad52\nTEMPLATE \"\"\"{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: \"\"\"\nPARAMETER num_ctx 4096\nPARAMETER stop \"\u003c/s\u003e\"\nPARAMETER stop \"USER:\"\nPARAMETER stop \"ASSISTANT:\"",
  "parameters": "num_ctx 4096\nstop \u003c/s\u003e\nstop USER:\nstop ASSISTANT:",
  "template": "{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: ",
  "parameters": "num_keep 24\nstop \"<|start_header_id|>\"\nstop \"<|end_header_id|>\"\nstop \"<|eot_id|>\"",
  "template": "{{ if .System }}<|start_header_id|>system<|end_header_id|>\n\n{{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|>\n\n{{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|>\n\n{{ .Response }}<|eot_id|>",
  "details": {
    "parent_model": "",
    "format": "gguf",
    "family": "llama",
    "families": ["llama", "clip"],
    "parameter_size": "7B",
    "families": [
      "llama"
    ],
    "parameter_size": "8.0B",
    "quantization_level": "Q4_0"
  },
  "model_info": {
    "general.architecture": "llama",
    "general.file_type": 2,
    "general.parameter_count": 8030261248,
    "general.quantization_version": 2,
    "llama.attention.head_count": 32,
    "llama.attention.head_count_kv": 8,
    "llama.attention.layer_norm_rms_epsilon": 0.00001,
    "llama.block_count": 32,
    "llama.context_length": 8192,
    "llama.embedding_length": 4096,
    "llama.feed_forward_length": 14336,
    "llama.rope.dimension_count": 128,
    "llama.rope.freq_base": 500000,
    "llama.vocab_size": 128256,
    "tokenizer.ggml.bos_token_id": 128000,
    "tokenizer.ggml.eos_token_id": 128009,
    "tokenizer.ggml.merges": [],            // populates if `verbose=true`
    "tokenizer.ggml.model": "gpt2",
    "tokenizer.ggml.pre": "llama-bpe",
    "tokenizer.ggml.token_type": [],        // populates if `verbose=true`
    "tokenizer.ggml.tokens": []             // populates if `verbose=true`
  }
}
```
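The documented `name` and `verbose` parameters can also be exercised without the Go client. A hedged plain-HTTP sketch follows (not taken from the docs; the default localhost:11434 host and the "llama3" model name are assumptions):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// name selects the model; verbose=true requests the untruncated
	// array-valued fields shown in the example above.
	body := []byte(`{"name": "llama3", "verbose": true}`)

	resp, err := http.Post("http://localhost:11434/api/show", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out)) // JSON containing modelfile, parameters, template, details, model_info, ...
}
```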
@@ -734,9 +734,44 @@ func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
	fmt.Fprint(&sb, m.String())
	resp.Modelfile = sb.String()

	kvData, err := getKVData(m.ModelPath, req.Verbose)
	if err != nil {
		return nil, err
	}
	delete(kvData, "general.name")
	delete(kvData, "tokenizer.chat_template")
	resp.ModelInfo = kvData

	if len(m.ProjectorPaths) > 0 {
		projectorData, err := getKVData(m.ProjectorPaths[0], req.Verbose)
		if err != nil {
			return nil, err
		}
		resp.ProjectorInfo = projectorData
	}

	return resp, nil
}

func getKVData(digest string, verbose bool) (llm.KV, error) {
	kvData, err := llm.LoadModel(digest)
	if err != nil {
		return nil, err
	}

	kv := kvData.KV()

	if !verbose {
		for k := range kv {
			if t, ok := kv[k].([]any); len(t) > 5 && ok {
				kv[k] = []any{}
			}
		}
	}

	return kv, nil
}

func (s *Server) ListModelsHandler(c *gin.Context) {
	ms, err := Manifests()
	if err != nil {
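To make the non-verbose behavior of getKVData concrete: any array-valued metadata entry longer than five elements is replaced with an empty slice before it is returned. A standalone sketch of that rule follows (truncateLargeArrays is an illustrative helper, not code from this change):

```go
package main

import "fmt"

// truncateLargeArrays mirrors the intent of getKVData's non-verbose path:
// large array-valued fields (e.g. tokenizer.ggml.tokens) are emptied so the
// default /api/show response stays small.
func truncateLargeArrays(kv map[string]any, verbose bool) map[string]any {
	if verbose {
		return kv
	}
	for k, v := range kv {
		if t, ok := v.([]any); ok && len(t) > 5 {
			kv[k] = []any{}
		}
	}
	return kv
}

func main() {
	kv := map[string]any{
		"general.architecture":  "llama",
		"tokenizer.ggml.tokens": []any{"a", "b", "c", "d", "e", "f"},
	}
	fmt.Println(truncateLargeArrays(kv, false)) // tokens collapse to [], architecture is untouched
}
```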
@@ -19,6 +19,7 @@ import (

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/llm"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/version"
@@ -212,6 +213,7 @@ func Test_Routes(t *testing.T) {
				"top_p 0.9",
			}
			assert.Equal(t, expectedParams, params)
			assert.InDelta(t, 0, showResp.ModelInfo["general.parameter_count"], 1e-9, "Parameter count should be 0")
		},
	},
}
@@ -325,3 +327,40 @@ func TestCase(t *testing.T) {
		})
	}
}

func TestShow(t *testing.T) {
	t.Setenv("OLLAMA_MODELS", t.TempDir())
	envconfig.LoadConfig()

	var s Server

	createRequest(t, s.CreateModelHandler, api.CreateRequest{
		Name: "show-model",
		Modelfile: fmt.Sprintf(
			"FROM %s\nFROM %s",
			createBinFile(t, llm.KV{"general.architecture": "test"}, nil),
			createBinFile(t, llm.KV{"general.architecture": "clip"}, nil),
		),
	})

	w := createRequest(t, s.ShowModelHandler, api.ShowRequest{
		Name: "show-model",
	})

	if w.Code != http.StatusOK {
		t.Fatalf("expected status code 200, actual %d", w.Code)
	}

	var resp api.ShowResponse
	if err := json.NewDecoder(w.Body).Decode(&resp); err != nil {
		t.Fatal(err)
	}

	if resp.ModelInfo["general.architecture"] != "test" {
		t.Fatal("Expected model architecture to be 'test', but got", resp.ModelInfo["general.architecture"])
	}

	if resp.ProjectorInfo["general.architecture"] != "clip" {
		t.Fatal("Expected projector architecture to be 'clip', but got", resp.ProjectorInfo["general.architecture"])
	}
}