2024-04-12 20:55:12 +00:00
|
|
|
package server
|
|
|
|
|
|
|
|
import (
|
|
|
|
"archive/zip"
|
|
|
|
"bytes"
|
|
|
|
"context"
|
2024-06-20 20:45:47 +00:00
|
|
|
"encoding/json"
|
2024-04-12 20:55:12 +00:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2024-06-12 20:30:08 +00:00
|
|
|
"log/slog"
|
2024-04-12 20:55:12 +00:00
|
|
|
"net/http"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2024-06-20 20:45:47 +00:00
|
|
|
"slices"
|
|
|
|
"strings"
|
|
|
|
"text/template/parse"
|
2024-04-12 20:55:12 +00:00
|
|
|
|
|
|
|
"github.com/ollama/ollama/api"
|
|
|
|
"github.com/ollama/ollama/convert"
|
|
|
|
"github.com/ollama/ollama/llm"
|
2024-06-10 21:54:42 +00:00
|
|
|
"github.com/ollama/ollama/template"
|
2024-04-12 20:55:12 +00:00
|
|
|
"github.com/ollama/ollama/types/model"
|
|
|
|
)
|
|
|
|
|
2024-05-20 21:58:27 +00:00
|
|
|
// intermediateBlobs maps an uploaded file's digest to the digest of the
// converted blob produced from it, so repeated creates can reuse the
// conversion result (written by parseFromZipFile).
var intermediateBlobs = make(map[string]string)
|
2024-05-10 22:48:41 +00:00
|
|
|
|
2024-06-12 20:30:08 +00:00
|
|
|
// layerGGML pairs a manifest Layer with its decoded GGML contents.
// GGML is nil for layers whose media type was not decoded (anything
// other than a model, projector, or adapter blob).
type layerGGML struct {
	Layer
	*llm.GGML
}
|
|
|
|
|
2024-06-12 20:30:08 +00:00
|
|
|
func parseFromModel(ctx context.Context, name model.Name, fn func(api.ProgressResponse)) (layers []*layerGGML, err error) {
|
2024-05-08 21:36:08 +00:00
|
|
|
m, err := ParseNamedManifest(name)
|
2024-04-12 20:55:12 +00:00
|
|
|
switch {
|
|
|
|
case errors.Is(err, os.ErrNotExist):
|
2024-05-01 17:34:39 +00:00
|
|
|
if err := PullModel(ctx, name.String(), ®istryOptions{}, fn); err != nil {
|
2024-04-12 20:55:12 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-05-08 21:36:08 +00:00
|
|
|
m, err = ParseNamedManifest(name)
|
2024-04-23 22:18:45 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2024-04-12 20:55:12 +00:00
|
|
|
case err != nil:
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-05-08 21:36:08 +00:00
|
|
|
for _, layer := range m.Layers {
|
|
|
|
layer, err := NewLayerFromLayer(layer.Digest, layer.MediaType, name.DisplayShortest())
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
switch layer.MediaType {
|
|
|
|
case "application/vnd.ollama.image.model",
|
|
|
|
"application/vnd.ollama.image.projector",
|
|
|
|
"application/vnd.ollama.image.adapter":
|
|
|
|
blobpath, err := GetBlobsPath(layer.Digest)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
blob, err := os.Open(blobpath)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer blob.Close()
|
|
|
|
|
2024-06-25 04:47:52 +00:00
|
|
|
ggml, _, err := llm.DecodeGGML(blob, 0)
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2024-04-25 15:53:08 +00:00
|
|
|
|
2024-06-12 20:30:08 +00:00
|
|
|
layers = append(layers, &layerGGML{layer, ggml})
|
2024-04-12 20:55:12 +00:00
|
|
|
default:
|
2024-06-12 20:30:08 +00:00
|
|
|
layers = append(layers, &layerGGML{layer, nil})
|
2024-04-12 20:55:12 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return layers, nil
|
|
|
|
}
|
|
|
|
|
2024-08-23 18:29:56 +00:00
|
|
|
func parseFromZipFile(_ context.Context, command string, baseLayers []*layerGGML, f *os.File, digest string, fn func(api.ProgressResponse)) (layers []*layerGGML, err error) {
|
2024-06-29 23:53:59 +00:00
|
|
|
fi, err := f.Stat()
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
2024-06-29 23:53:59 +00:00
|
|
|
return nil, err
|
2024-04-12 20:55:12 +00:00
|
|
|
}
|
|
|
|
|
2024-06-29 23:53:59 +00:00
|
|
|
r, err := zip.NewReader(f, fi.Size())
|
2024-06-27 04:38:21 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-06-29 23:53:59 +00:00
|
|
|
p, err := os.MkdirTemp(filepath.Dir(f.Name()), "")
|
|
|
|
if err != nil {
|
2024-06-27 04:38:21 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
2024-06-29 23:53:59 +00:00
|
|
|
defer os.RemoveAll(p)
|
2024-06-27 04:38:21 +00:00
|
|
|
|
2024-04-12 20:55:12 +00:00
|
|
|
fn(api.ProgressResponse{Status: "converting model"})
|
|
|
|
// TODO(mxyng): this should write directly into a layer
|
|
|
|
// e.g. NewLayer(arch.Reader(), "application/vnd.ollama.image.model")
|
2024-06-29 23:53:59 +00:00
|
|
|
t, err := os.CreateTemp(p, "fp16")
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2024-06-29 23:53:59 +00:00
|
|
|
defer t.Close()
|
|
|
|
defer os.Remove(t.Name())
|
2024-04-12 20:55:12 +00:00
|
|
|
|
2024-08-23 18:29:56 +00:00
|
|
|
var layerType string
|
|
|
|
|
|
|
|
switch command {
|
|
|
|
case "adapter":
|
|
|
|
var baseModel *llm.GGML
|
|
|
|
for _, l := range baseLayers {
|
|
|
|
if l.GGML != nil {
|
|
|
|
baseModel = l.GGML
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if baseModel == nil {
|
|
|
|
return nil, fmt.Errorf("no base model specified for the adapter")
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := convert.ConvertAdapter(convert.NewZipReader(r, p, 32<<20), t, baseModel.KV()); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
layerType = "application/vnd.ollama.image.adapter"
|
|
|
|
case "model":
|
|
|
|
if err := convert.ConvertModel(convert.NewZipReader(r, p, 32<<20), t); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
layerType = "application/vnd.ollama.image.model"
|
2024-04-12 20:55:12 +00:00
|
|
|
}
|
|
|
|
|
2024-06-29 23:53:59 +00:00
|
|
|
if _, err := t.Seek(0, io.SeekStart); err != nil {
|
2024-04-12 20:55:12 +00:00
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-08-23 18:29:56 +00:00
|
|
|
layer, err := NewLayer(t, layerType)
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
2024-05-20 21:58:27 +00:00
|
|
|
return nil, err
|
2024-04-12 20:55:12 +00:00
|
|
|
}
|
|
|
|
|
2024-05-10 22:48:41 +00:00
|
|
|
bin, err := layer.Open()
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer bin.Close()
|
|
|
|
|
2024-06-25 04:47:52 +00:00
|
|
|
ggml, _, err := llm.DecodeGGML(bin, 0)
|
2024-04-12 20:55:12 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-06-12 20:30:08 +00:00
|
|
|
layers = append(layers, &layerGGML{layer, ggml})
|
2024-05-10 22:48:41 +00:00
|
|
|
|
2024-05-20 21:58:27 +00:00
|
|
|
intermediateBlobs[digest] = layer.Digest
|
2024-06-12 20:30:08 +00:00
|
|
|
return detectChatTemplate(layers)
|
2024-04-12 20:55:12 +00:00
|
|
|
}
|
|
|
|
|
2024-08-23 18:29:56 +00:00
|
|
|
// parseFromFile creates model layers from a GGUF/GGLA file. Zip archives are
// delegated to parseFromZipFile for conversion. A single file may contain
// several concatenated GGML payloads; each becomes its own layer.
func parseFromFile(ctx context.Context, command string, baseLayers []*layerGGML, file *os.File, digest string, fn func(api.ProgressResponse)) (layers []*layerGGML, err error) {
	// Sniff the content type from the first 512 bytes only.
	sr := io.NewSectionReader(file, 0, 512)
	contentType, err := detectContentType(sr)
	if err != nil {
		return nil, err
	}

	switch contentType {
	case "gguf", "ggla":
		// noop
	case "application/zip":
		return parseFromZipFile(ctx, command, baseLayers, file, digest, fn)
	default:
		return nil, fmt.Errorf("unsupported content type: %s", contentType)
	}

	stat, err := file.Stat()
	if err != nil {
		return nil, err
	}

	// Walk the file decoding consecutive GGML payloads. The loop condition
	// and the `offset == 0 && n == stat.Size()` check below imply n is the
	// absolute file position after the decoded payload, so `offset = n`
	// advances to the next one.
	var offset int64
	for offset < stat.Size() {
		ggml, n, err := llm.DecodeGGML(file, 0)
		if errors.Is(err, io.EOF) {
			break
		} else if err != nil {
			return nil, err
		}

		// Pick a media type from the decoded metadata; plain models are the
		// default.
		mediatype := "application/vnd.ollama.image.model"
		if ggml.Name() == "ggla" || ggml.KV().Kind() == "adapter" {
			mediatype = "application/vnd.ollama.image.adapter"
		} else if ggml.KV().Architecture() == "clip" {
			mediatype = "application/vnd.ollama.image.projector"
		}

		// If the file is exactly one payload and the caller supplied its
		// digest, try to reuse the existing blob instead of copying it.
		var layer Layer
		if digest != "" && n == stat.Size() && offset == 0 {
			layer, err = NewLayerFromLayer(digest, mediatype, file.Name())
			if err != nil {
				slog.Debug("could not create new layer from layer", "error", err)
			}
		}

		// Fallback to creating layer from file copy (either NewLayerFromLayer failed, or digest empty/n != stat.Size())
		// NOTE(review): for a second payload, the section length argument n
		// looks like it should be n-offset — confirm DecodeGGML's return
		// semantics for multi-payload files.
		if layer.Digest == "" {
			layer, err = NewLayer(io.NewSectionReader(file, offset, n), mediatype)
			if err != nil {
				return nil, err
			}
		}

		layers = append(layers, &layerGGML{layer, ggml})
		offset = n
	}

	return detectChatTemplate(layers)
}
|
|
|
|
|
|
|
|
func detectChatTemplate(layers []*layerGGML) ([]*layerGGML, error) {
|
|
|
|
for _, layer := range layers {
|
|
|
|
if s := layer.GGML.KV().ChatTemplate(); s != "" {
|
2024-06-10 21:54:42 +00:00
|
|
|
if t, err := template.Named(s); err != nil {
|
2024-06-12 20:30:08 +00:00
|
|
|
slog.Debug("template detection", "error", err)
|
|
|
|
} else {
|
2024-07-06 00:31:23 +00:00
|
|
|
layer, err := NewLayer(t.Reader(), "application/vnd.ollama.image.template")
|
2024-06-12 20:30:08 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-07-06 00:31:23 +00:00
|
|
|
layer.status = fmt.Sprintf("using autodetected template %s", t.Name)
|
|
|
|
layers = append(layers, &layerGGML{layer, nil})
|
|
|
|
|
|
|
|
if t.Parameters != nil {
|
|
|
|
var b bytes.Buffer
|
|
|
|
if err := json.NewEncoder(&b).Encode(t.Parameters); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
layer, err := NewLayer(&b, "application/vnd.ollama.image.params")
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
layers = append(layers, &layerGGML{layer, nil})
|
|
|
|
}
|
2024-06-12 20:30:08 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-04-12 20:55:12 +00:00
|
|
|
return layers, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func detectContentType(r io.Reader) (string, error) {
|
|
|
|
var b bytes.Buffer
|
|
|
|
if _, err := io.Copy(&b, r); err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
if contentType := llm.DetectGGMLType(b.Bytes()); contentType != "" {
|
|
|
|
return contentType, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if contentType := http.DetectContentType(b.Bytes()); contentType != "application/octet-stream" {
|
|
|
|
return contentType, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return "unknown", nil
|
|
|
|
}
|
2024-06-20 20:45:47 +00:00
|
|
|
|
|
|
|
// parseToolCalls attempts to parse a JSON string into a slice of ToolCalls.
// mxyng: this only really works if the input contains tool calls in some JSON format
func (m *Model) parseToolCalls(s string) ([]api.ToolCall, bool) {
	// create a subtree from the node that ranges over .ToolCalls
	tmpl := m.Template.Subtree(func(n parse.Node) bool {
		if t, ok := n.(*parse.RangeNode); ok {
			return slices.Contains(template.Identifiers(t.Pipe), "ToolCalls")
		}

		return false
	})

	// No ToolCalls range in the template means this model has no known
	// tool-call output format to match against.
	if tmpl == nil {
		return nil, false
	}

	// Render the subtree with sentinel values so the output reveals which
	// JSON keys the template uses for the function name and arguments.
	var b bytes.Buffer
	if err := tmpl.Execute(&b, map[string][]api.ToolCall{
		"ToolCalls": {
			{
				Function: api.ToolCallFunction{
					Name: "@@name@@",
					Arguments: api.ToolCallFunctionArguments{
						"@@argument@@": 1,
					},
				},
			},
		},
	}); err != nil {
		return nil, false
	}

	var kv map[string]any
	// execute the subtree with placeholders to identify the keys
	// trim any commands that might exist in the template
	if err := json.Unmarshal(bytes.TrimSuffix(b.Bytes(), []byte(",")), &kv); err != nil {
		return nil, false
	}

	// find the keys that correspond to the name and arguments fields
	// (the string value came from Name, the object from Arguments)
	var name, arguments string
	for k, v := range kv {
		switch v.(type) {
		case string:
			name = k
		case map[string]any:
			arguments = k
		}
	}

	// Bail out unless both keys were identified.
	if name == "" || arguments == "" {
		return nil, false
	}

	// Scan s for JSON objects, skipping over non-JSON text between them by
	// advancing past the reported error offsets.
	var objs []map[string]any
	for offset := 0; offset < len(s); {
		var obj map[string]any
		decoder := json.NewDecoder(strings.NewReader(s[offset:]))
		if err := decoder.Decode(&obj); errors.Is(err, io.EOF) || errors.Is(err, io.ErrUnexpectedEOF) {
			break
		} else if syntax := &(json.SyntaxError{}); errors.As(err, &syntax) {
			// skip over any syntax errors
			// (Offset is relative to s[offset:], hence += not =)
			offset += int(syntax.Offset)
		} else if unmarshalType := &(json.UnmarshalTypeError{}); errors.As(err, &unmarshalType) {
			// skip over any unmarshalable types
			offset += int(unmarshalType.Offset)
		} else if err != nil {
			slog.Error("parseToolCalls", "error", err)
			return nil, false
		} else {
			// Successful decode: advance past the consumed bytes.
			offset += int(decoder.InputOffset())

			// collect all nested objects
			// (tool calls may be wrapped, e.g. {"tool_calls": [...]})
			var collect func(any) []map[string]any
			collect = func(obj any) (all []map[string]any) {
				switch o := obj.(type) {
				case map[string]any:
					all = append(all, o)
					for _, v := range o {
						all = append(all, collect(v)...)
					}
				case []any:
					for _, v := range o {
						all = append(all, collect(v)...)
					}
				}

				return all
			}
			objs = append(objs, collect(obj)...)
		}
	}

	// Keep only objects that have a string under the name key and an object
	// under the arguments key — i.e. objects shaped like a tool call.
	var toolCalls []api.ToolCall
	for _, kv := range objs {
		n, nok := kv[name].(string)
		a, aok := kv[arguments].(map[string]any)
		if nok && aok {
			toolCalls = append(toolCalls, api.ToolCall{
				Function: api.ToolCallFunction{
					Name:      n,
					Arguments: a,
				},
			})
		}
	}

	return toolCalls, len(toolCalls) > 0
}
|