2023-07-03 19:22:44 +00:00
|
|
|
package cmd
|
|
|
|
|
|
|
|
import (
|
2024-03-07 05:01:51 +00:00
|
|
|
"archive/zip"
|
2024-09-11 18:01:30 +00:00
|
|
|
"bufio"
|
2023-11-14 22:07:40 +00:00
|
|
|
"bytes"
|
2023-07-03 19:22:44 +00:00
|
|
|
"context"
|
2023-08-11 17:58:23 +00:00
|
|
|
"crypto/ed25519"
|
|
|
|
"crypto/rand"
|
2023-11-14 22:07:40 +00:00
|
|
|
"crypto/sha256"
|
2023-08-11 17:58:23 +00:00
|
|
|
"encoding/pem"
|
2023-07-06 22:43:04 +00:00
|
|
|
"errors"
|
2023-07-06 16:24:49 +00:00
|
|
|
"fmt"
|
2023-07-18 21:01:19 +00:00
|
|
|
"io"
|
2023-07-03 19:22:44 +00:00
|
|
|
"log"
|
2024-05-14 00:17:36 +00:00
|
|
|
"math"
|
2023-07-03 19:22:44 +00:00
|
|
|
"net"
|
2023-10-30 15:10:18 +00:00
|
|
|
"net/http"
|
2023-07-03 19:22:44 +00:00
|
|
|
"os"
|
2023-09-29 00:13:01 +00:00
|
|
|
"os/signal"
|
2023-07-19 02:34:05 +00:00
|
|
|
"path/filepath"
|
2023-07-31 20:25:57 +00:00
|
|
|
"runtime"
|
2024-09-11 18:01:30 +00:00
|
|
|
"strconv"
|
2023-07-07 17:12:58 +00:00
|
|
|
"strings"
|
2024-08-12 18:46:32 +00:00
|
|
|
"sync/atomic"
|
2023-09-29 00:13:01 +00:00
|
|
|
"syscall"
|
2023-07-06 22:43:04 +00:00
|
|
|
"time"
|
2023-07-03 19:22:44 +00:00
|
|
|
|
2023-11-24 06:21:32 +00:00
|
|
|
"github.com/containerd/console"
|
2024-05-15 23:29:33 +00:00
|
|
|
"github.com/mattn/go-runewidth"
|
2023-07-18 16:09:45 +00:00
|
|
|
"github.com/olekukonko/tablewriter"
|
2023-07-06 20:49:31 +00:00
|
|
|
"github.com/spf13/cobra"
|
2023-08-11 17:58:23 +00:00
|
|
|
"golang.org/x/crypto/ssh"
|
2023-09-22 20:36:08 +00:00
|
|
|
"golang.org/x/term"
|
2023-07-06 20:49:31 +00:00
|
|
|
|
2024-03-26 20:04:17 +00:00
|
|
|
"github.com/ollama/ollama/api"
|
2024-05-24 21:57:15 +00:00
|
|
|
"github.com/ollama/ollama/envconfig"
|
2024-03-26 20:04:17 +00:00
|
|
|
"github.com/ollama/ollama/format"
|
2024-05-20 18:26:45 +00:00
|
|
|
"github.com/ollama/ollama/parser"
|
2024-03-26 20:04:17 +00:00
|
|
|
"github.com/ollama/ollama/progress"
|
|
|
|
"github.com/ollama/ollama/server"
|
2024-11-25 17:40:16 +00:00
|
|
|
"github.com/ollama/ollama/types/model"
|
2024-03-26 20:04:17 +00:00
|
|
|
"github.com/ollama/ollama/version"
|
2023-07-03 19:22:44 +00:00
|
|
|
)
|
|
|
|
|
2024-10-22 20:32:24 +00:00
|
|
|
var (
	// errModelNotFound is returned when a model directory contains neither a
	// Modelfile nor any recognized weight files (safetensors/pytorch).
	errModelNotFound = errors.New("no Modelfile or safetensors files found")
	// errModelfileNotFound is returned when the user explicitly requested a
	// Modelfile via --file but it does not exist.
	errModelfileNotFound = errors.New("specified Modelfile wasn't found")
)
|
|
|
|
|
|
|
|
func getModelfileName(cmd *cobra.Command) (string, error) {
|
|
|
|
fn, _ := cmd.Flags().GetString("file")
|
|
|
|
|
|
|
|
filename := fn
|
|
|
|
if filename == "" {
|
|
|
|
filename = "Modelfile"
|
|
|
|
}
|
|
|
|
|
|
|
|
absName, err := filepath.Abs(filename)
|
2023-07-19 02:34:05 +00:00
|
|
|
if err != nil {
|
2024-10-22 20:32:24 +00:00
|
|
|
return "", err
|
2023-07-19 02:34:05 +00:00
|
|
|
}
|
|
|
|
|
2024-10-22 20:32:24 +00:00
|
|
|
_, err = os.Stat(absName)
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
2024-10-22 20:32:24 +00:00
|
|
|
return fn, err
|
2023-08-16 15:03:48 +00:00
|
|
|
}
|
2023-07-17 00:02:22 +00:00
|
|
|
|
2024-10-22 20:32:24 +00:00
|
|
|
return absName, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// CreateHandler implements "ollama create": it reads a Modelfile (or falls
// back to "FROM .\n" when no Modelfile exists and none was explicitly
// requested), uploads any local model/adapter weights it references to the
// server as blobs, and asks the server to create the model named args[0].
func CreateHandler(cmd *cobra.Command, args []string) error {
	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	var reader io.Reader

	filename, err := getModelfileName(cmd)
	if os.IsNotExist(err) {
		// getModelfileName returns the raw --file flag value on a stat
		// failure: empty means the user didn't request a specific file,
		// so import the current directory instead.
		if filename == "" {
			reader = strings.NewReader("FROM .\n")
		} else {
			return errModelfileNotFound
		}
	} else if err != nil {
		return err
	} else {
		f, err := os.Open(filename)
		if err != nil {
			return err
		}

		reader = f
		defer f.Close()
	}

	modelfile, err := parser.ParseFile(reader)
	if err != nil {
		return err
	}

	// home is used to expand "~" in FROM/ADAPTER paths below.
	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}

	status := "transferring model data"
	spinner := progress.NewSpinner(status)
	p.Add(status, spinner)
	// NOTE(review): p.Stop() is already deferred at the top of this
	// function; this second defer looks redundant — confirm Stop is
	// idempotent.
	defer p.Stop()

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	// Rewrite each local model/adapter path in the Modelfile into a
	// server-side blob reference ("@sha256:...").
	for i := range modelfile.Commands {
		switch modelfile.Commands[i].Name {
		case "model", "adapter":
			path := modelfile.Commands[i].Args
			// expand "~" and "~/..." to the user's home directory
			if path == "~" {
				path = home
			} else if strings.HasPrefix(path, "~/") {
				path = filepath.Join(home, path[2:])
			}

			// relative paths are resolved against the Modelfile's directory
			if !filepath.IsAbs(path) {
				path = filepath.Join(filepath.Dir(filename), path)
			}

			fi, err := os.Stat(path)
			if errors.Is(err, os.ErrNotExist) && modelfile.Commands[i].Name == "model" {
				// a missing "model" argument may be a remote model name
				// rather than a local path; let the server resolve it
				continue
			} else if err != nil {
				return err
			}

			if fi.IsDir() {
				// this is likely a safetensors or pytorch directory
				// TODO make this work w/ adapters
				tempfile, err := tempZipFiles(path)
				if err != nil {
					return err
				}
				// cleaned up when CreateHandler returns; the zip must
				// outlive the createBlob upload below
				defer os.RemoveAll(tempfile)

				path = tempfile
			}

			digest, err := createBlob(cmd, client, path, spinner)
			if err != nil {
				return err
			}

			modelfile.Commands[i].Args = "@" + digest
		}
	}

	// Progress callback: one bar per layer digest, a spinner for plain
	// status messages.
	bars := make(map[string]*progress.Bar)
	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			spinner.Stop()

			bar, ok := bars[resp.Digest]
			if !ok {
				// resp.Digest[7:19] skips the "sha256:" prefix and keeps
				// a short 12-character id for display
				bar = progress.NewBar(fmt.Sprintf("pulling %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			// new status line: replace the current spinner
			spinner.Stop()

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	quantize, _ := cmd.Flags().GetString("quantize")

	request := api.CreateRequest{Name: args[0], Modelfile: modelfile.String(), Quantize: quantize}
	if err := client.Create(cmd.Context(), &request, fn); err != nil {
		return err
	}

	return nil
}
|
|
|
|
|
2024-04-22 18:02:25 +00:00
|
|
|
func tempZipFiles(path string) (string, error) {
|
|
|
|
tempfile, err := os.CreateTemp("", "ollama-tf")
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer tempfile.Close()
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
detectContentType := func(path string) (string, error) {
|
|
|
|
f, err := os.Open(path)
|
2024-04-22 18:02:25 +00:00
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
2024-04-25 21:41:30 +00:00
|
|
|
defer f.Close()
|
2024-04-22 18:02:25 +00:00
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
var b bytes.Buffer
|
|
|
|
b.Grow(512)
|
2024-04-22 18:02:25 +00:00
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
if _, err := io.CopyN(&b, f, 512); err != nil && !errors.Is(err, io.EOF) {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
contentType, _, _ := strings.Cut(http.DetectContentType(b.Bytes()), ";")
|
|
|
|
return contentType, nil
|
2024-04-22 18:02:25 +00:00
|
|
|
}
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
glob := func(pattern, contentType string) ([]string, error) {
|
|
|
|
matches, err := filepath.Glob(pattern)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, safetensor := range matches {
|
|
|
|
if ct, err := detectContentType(safetensor); err != nil {
|
|
|
|
return nil, err
|
|
|
|
} else if ct != contentType {
|
|
|
|
return nil, fmt.Errorf("invalid content type: expected %s for %s", ct, safetensor)
|
2024-04-22 18:02:25 +00:00
|
|
|
}
|
2024-04-25 21:41:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return matches, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
var files []string
|
|
|
|
if st, _ := glob(filepath.Join(path, "model*.safetensors"), "application/octet-stream"); len(st) > 0 {
|
|
|
|
// safetensors files might be unresolved git lfs references; skip if they are
|
|
|
|
// covers model-x-of-y.safetensors, model.fp32-x-of-y.safetensors, model.safetensors
|
|
|
|
files = append(files, st...)
|
2024-08-23 18:29:56 +00:00
|
|
|
} else if st, _ := glob(filepath.Join(path, "adapters.safetensors"), "application/octet-stream"); len(st) > 0 {
|
|
|
|
// covers adapters.safetensors
|
|
|
|
files = append(files, st...)
|
|
|
|
} else if st, _ := glob(filepath.Join(path, "adapter_model.safetensors"), "application/octet-stream"); len(st) > 0 {
|
|
|
|
// covers adapter_model.safetensors
|
|
|
|
files = append(files, st...)
|
2024-04-25 21:41:30 +00:00
|
|
|
} else if pt, _ := glob(filepath.Join(path, "pytorch_model*.bin"), "application/zip"); len(pt) > 0 {
|
|
|
|
// pytorch files might also be unresolved git lfs references; skip if they are
|
|
|
|
// covers pytorch_model-x-of-y.bin, pytorch_model.fp32-x-of-y.bin, pytorch_model.bin
|
|
|
|
files = append(files, pt...)
|
2024-05-08 23:07:46 +00:00
|
|
|
} else if pt, _ := glob(filepath.Join(path, "consolidated*.pth"), "application/zip"); len(pt) > 0 {
|
2024-04-25 21:41:30 +00:00
|
|
|
// pytorch files might also be unresolved git lfs references; skip if they are
|
|
|
|
// covers consolidated.x.pth, consolidated.pth
|
|
|
|
files = append(files, pt...)
|
|
|
|
} else {
|
2024-10-22 20:32:24 +00:00
|
|
|
return "", errModelNotFound
|
2024-04-25 21:41:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// add configuration files, json files are detected as text/plain
|
|
|
|
js, err := glob(filepath.Join(path, "*.json"), "text/plain")
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
files = append(files, js...)
|
|
|
|
|
2024-06-07 21:55:56 +00:00
|
|
|
// bert models require a nested config.json
|
|
|
|
// TODO(mxyng): merge this with the glob above
|
|
|
|
js, err = glob(filepath.Join(path, "**/*.json"), "text/plain")
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
files = append(files, js...)
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
if tks, _ := glob(filepath.Join(path, "tokenizer.model"), "application/octet-stream"); len(tks) > 0 {
|
|
|
|
// add tokenizer.model if it exists, tokenizer.json is automatically picked up by the previous glob
|
|
|
|
// tokenizer.model might be a unresolved git lfs reference; error if it is
|
|
|
|
files = append(files, tks...)
|
|
|
|
} else if tks, _ := glob(filepath.Join(path, "**/tokenizer.model"), "text/plain"); len(tks) > 0 {
|
|
|
|
// some times tokenizer.model is in a subdirectory (e.g. meta-llama/Meta-Llama-3-8B)
|
|
|
|
files = append(files, tks...)
|
|
|
|
}
|
|
|
|
|
2024-06-27 04:38:21 +00:00
|
|
|
zipfile := zip.NewWriter(tempfile)
|
|
|
|
defer zipfile.Close()
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
for _, file := range files {
|
|
|
|
f, err := os.Open(file)
|
|
|
|
if err != nil {
|
2024-04-22 18:02:25 +00:00
|
|
|
return "", err
|
|
|
|
}
|
2024-04-25 21:41:30 +00:00
|
|
|
defer f.Close()
|
2024-04-22 18:02:25 +00:00
|
|
|
|
|
|
|
fi, err := f.Stat()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
zfi, err := zip.FileInfoHeader(fi)
|
2024-04-22 18:02:25 +00:00
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
2024-06-07 21:55:56 +00:00
|
|
|
zfi.Name, err = filepath.Rel(path, file)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
zf, err := zipfile.CreateHeader(zfi)
|
2024-04-22 18:02:25 +00:00
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
|
2024-04-25 21:41:30 +00:00
|
|
|
if _, err := io.Copy(zf, f); err != nil {
|
2024-04-22 18:02:25 +00:00
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return tempfile.Name(), nil
|
|
|
|
}
|
|
|
|
|
2024-08-12 18:46:32 +00:00
|
|
|
func createBlob(cmd *cobra.Command, client *api.Client, path string, spinner *progress.Spinner) (string, error) {
|
2024-03-07 05:01:51 +00:00
|
|
|
bin, err := os.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
defer bin.Close()
|
|
|
|
|
2024-08-12 18:46:32 +00:00
|
|
|
// Get file info to retrieve the size
|
|
|
|
fileInfo, err := bin.Stat()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
fileSize := fileInfo.Size()
|
|
|
|
|
2024-03-07 05:01:51 +00:00
|
|
|
hash := sha256.New()
|
|
|
|
if _, err := io.Copy(hash, bin); err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
2024-03-29 01:54:01 +00:00
|
|
|
|
|
|
|
if _, err := bin.Seek(0, io.SeekStart); err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
2024-03-07 05:01:51 +00:00
|
|
|
|
2024-08-12 18:46:32 +00:00
|
|
|
var pw progressWriter
|
|
|
|
status := "transferring model data 0%"
|
|
|
|
spinner.SetMessage(status)
|
|
|
|
|
|
|
|
done := make(chan struct{})
|
|
|
|
defer close(done)
|
|
|
|
|
|
|
|
go func() {
|
|
|
|
ticker := time.NewTicker(60 * time.Millisecond)
|
|
|
|
defer ticker.Stop()
|
|
|
|
for {
|
|
|
|
select {
|
|
|
|
case <-ticker.C:
|
|
|
|
spinner.SetMessage(fmt.Sprintf("transferring model data %d%%", int(100*pw.n.Load()/fileSize)))
|
|
|
|
case <-done:
|
|
|
|
spinner.SetMessage("transferring model data 100%")
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2024-03-07 05:01:51 +00:00
|
|
|
digest := fmt.Sprintf("sha256:%x", hash.Sum(nil))
|
2024-08-12 18:46:32 +00:00
|
|
|
if err = client.CreateBlob(cmd.Context(), digest, io.TeeReader(bin, &pw)); err != nil {
|
2024-03-07 05:01:51 +00:00
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
return digest, nil
|
|
|
|
}
|
|
|
|
|
2024-08-12 18:46:32 +00:00
|
|
|
// progressWriter counts the bytes written through it. The counter is an
// atomic so a concurrent reporter goroutine can read it while an upload
// writes to it.
type progressWriter struct {
	n atomic.Int64
}

// Write records len(p) bytes as transferred and never fails, satisfying
// io.Writer so the writer can sit on the counting side of an io.TeeReader.
func (pw *progressWriter) Write(p []byte) (int, error) {
	written := len(p)
	pw.n.Add(int64(written))
	return written, nil
}
|
|
|
|
|
2024-09-11 23:36:21 +00:00
|
|
|
func loadOrUnloadModel(cmd *cobra.Command, opts *runOptions) error {
|
|
|
|
p := progress.NewProgress(os.Stderr)
|
|
|
|
defer p.StopAndClear()
|
|
|
|
|
|
|
|
spinner := progress.NewSpinner("")
|
|
|
|
p.Add("", spinner)
|
|
|
|
|
|
|
|
client, err := api.ClientFromEnvironment()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
req := &api.GenerateRequest{
|
|
|
|
Model: opts.Model,
|
|
|
|
KeepAlive: opts.KeepAlive,
|
|
|
|
}
|
|
|
|
|
|
|
|
return client.Generate(cmd.Context(), req, func(api.GenerateResponse) error { return nil })
|
|
|
|
}
|
|
|
|
|
|
|
|
func StopHandler(cmd *cobra.Command, args []string) error {
|
|
|
|
opts := &runOptions{
|
|
|
|
Model: args[0],
|
|
|
|
KeepAlive: &api.Duration{Duration: 0},
|
|
|
|
}
|
|
|
|
if err := loadOrUnloadModel(cmd, opts); err != nil {
|
|
|
|
if strings.Contains(err.Error(), "not found") {
|
|
|
|
return fmt.Errorf("couldn't find model \"%s\" to stop", args[0])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-07-25 16:07:27 +00:00
|
|
|
// RunHandler implements "ollama run": it runs the model named args[0]
// either interactively (a REPL) or one-shot, with the prompt assembled
// from the remaining args and/or piped stdin. The model is pulled
// automatically if the server reports it missing.
func RunHandler(cmd *cobra.Command, args []string) error {
	interactive := true

	opts := runOptions{
		Model:    args[0],
		WordWrap: os.Getenv("TERM") == "xterm-256color",
		Options:  map[string]interface{}{},
	}

	format, err := cmd.Flags().GetString("format")
	if err != nil {
		return err
	}
	opts.Format = format

	keepAlive, err := cmd.Flags().GetString("keepalive")
	if err != nil {
		return err
	}
	if keepAlive != "" {
		d, err := time.ParseDuration(keepAlive)
		if err != nil {
			return err
		}
		opts.KeepAlive = &api.Duration{Duration: d}
	}

	prompts := args[1:]
	// prepend stdin to the prompt if provided
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			return err
		}

		prompts = append([]string{string(in)}, prompts...)
		opts.WordWrap = false
		interactive = false
	}
	opts.Prompt = strings.Join(prompts, " ")
	if len(prompts) > 0 {
		// an explicit prompt means one-shot mode, not the REPL
		interactive = false
	}
	// Be quiet if we're redirecting to a pipe or file
	if !term.IsTerminal(int(os.Stdout.Fd())) {
		interactive = false
	}

	nowrap, err := cmd.Flags().GetBool("nowordwrap")
	if err != nil {
		return err
	}
	opts.WordWrap = !nowrap

	// Fill out the rest of the options based on information about the
	// model.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	name := args[0]
	// Show the model; on a 404 from the server, pull it and retry once.
	info, err := func() (*api.ShowResponse, error) {
		showReq := &api.ShowRequest{Name: name}
		info, err := client.Show(cmd.Context(), showReq)
		var se api.StatusError
		if errors.As(err, &se) && se.StatusCode == http.StatusNotFound {
			if err := PullHandler(cmd, []string{name}); err != nil {
				return nil, err
			}
			return client.Show(cmd.Context(), &api.ShowRequest{Name: name})
		}
		return info, err
	}()
	if err != nil {
		return err
	}

	// a non-empty projector indicates a multimodal (image-capable) model
	opts.MultiModal = len(info.ProjectorInfo) != 0
	opts.ParentModel = info.Details.ParentModel

	if interactive {
		// warm the model into server memory before starting the REPL
		if err := loadOrUnloadModel(cmd, &opts); err != nil {
			return err
		}

		// replay any conversation messages baked into the model
		for _, msg := range info.Messages {
			switch msg.Role {
			case "user":
				fmt.Printf(">>> %s\n", msg.Content)
			case "assistant":
				state := &displayResponseState{}
				displayResponse(msg.Content, opts.WordWrap, state)
				fmt.Println()
				fmt.Println()
			}
		}

		return generateInteractive(cmd, opts)
	}
	return generate(cmd, opts)
}
|
|
|
|
|
2023-07-20 23:09:23 +00:00
|
|
|
func PushHandler(cmd *cobra.Command, args []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-17 00:02:22 +00:00
|
|
|
|
2023-07-21 22:42:19 +00:00
|
|
|
insecure, err := cmd.Flags().GetBool("insecure")
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2023-11-15 00:33:24 +00:00
|
|
|
p := progress.NewProgress(os.Stderr)
|
|
|
|
defer p.Stop()
|
|
|
|
|
|
|
|
bars := make(map[string]*progress.Bar)
|
2023-11-19 18:43:21 +00:00
|
|
|
var status string
|
|
|
|
var spinner *progress.Spinner
|
2023-11-15 00:33:24 +00:00
|
|
|
|
2023-07-19 01:51:30 +00:00
|
|
|
fn := func(resp api.ProgressResponse) error {
|
2023-11-15 00:33:24 +00:00
|
|
|
if resp.Digest != "" {
|
2023-11-19 18:43:21 +00:00
|
|
|
if spinner != nil {
|
|
|
|
spinner.Stop()
|
|
|
|
}
|
2023-11-15 00:33:24 +00:00
|
|
|
|
|
|
|
bar, ok := bars[resp.Digest]
|
|
|
|
if !ok {
|
2023-11-20 16:37:17 +00:00
|
|
|
bar = progress.NewBar(fmt.Sprintf("pushing %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
|
2023-11-15 00:33:24 +00:00
|
|
|
bars[resp.Digest] = bar
|
|
|
|
p.Add(resp.Digest, bar)
|
|
|
|
}
|
|
|
|
|
|
|
|
bar.Set(resp.Completed)
|
|
|
|
} else if status != resp.Status {
|
2023-11-19 18:43:21 +00:00
|
|
|
if spinner != nil {
|
|
|
|
spinner.Stop()
|
|
|
|
}
|
2023-11-15 00:33:24 +00:00
|
|
|
|
|
|
|
status = resp.Status
|
|
|
|
spinner = progress.NewSpinner(status)
|
|
|
|
p.Add(status, spinner)
|
|
|
|
}
|
|
|
|
|
2023-07-17 00:02:22 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-11-15 00:33:24 +00:00
|
|
|
request := api.PushRequest{Name: args[0], Insecure: insecure}
|
2024-11-25 17:40:16 +00:00
|
|
|
|
|
|
|
n := model.ParseName(args[0])
|
2023-10-12 22:56:40 +00:00
|
|
|
if err := client.Push(cmd.Context(), &request, fn); err != nil {
|
2024-04-30 18:02:08 +00:00
|
|
|
if spinner != nil {
|
|
|
|
spinner.Stop()
|
|
|
|
}
|
|
|
|
if strings.Contains(err.Error(), "access denied") {
|
|
|
|
return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own")
|
|
|
|
}
|
2023-11-15 00:33:24 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-11-25 17:40:16 +00:00
|
|
|
p.Stop()
|
2023-11-16 00:59:49 +00:00
|
|
|
spinner.Stop()
|
2024-11-25 17:40:16 +00:00
|
|
|
|
|
|
|
destination := n.String()
|
|
|
|
if strings.HasSuffix(n.Host, ".ollama.ai") || strings.HasSuffix(n.Host, ".ollama.com") {
|
|
|
|
destination = "https://ollama.com/" + strings.TrimSuffix(n.DisplayShortest(), ":latest")
|
|
|
|
}
|
|
|
|
fmt.Printf("\nYou can find your model at:\n\n")
|
|
|
|
fmt.Printf("\t%s\n", destination)
|
|
|
|
|
2023-11-15 00:33:24 +00:00
|
|
|
return nil
|
2023-07-17 00:02:22 +00:00
|
|
|
}
|
|
|
|
|
2023-07-20 23:09:23 +00:00
|
|
|
func ListHandler(cmd *cobra.Command, args []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-18 16:09:45 +00:00
|
|
|
|
2023-10-12 22:56:40 +00:00
|
|
|
models, err := client.List(cmd.Context())
|
2023-07-18 16:09:45 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
var data [][]string
|
|
|
|
|
|
|
|
for _, m := range models.Models {
|
2023-07-18 21:01:19 +00:00
|
|
|
if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
|
2023-11-14 22:57:41 +00:00
|
|
|
data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), format.HumanTime(m.ModifiedAt, "Never")})
|
2023-07-18 21:01:19 +00:00
|
|
|
}
|
2023-07-18 16:09:45 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
table := tablewriter.NewWriter(os.Stdout)
|
2023-08-29 03:50:24 +00:00
|
|
|
table.SetHeader([]string{"NAME", "ID", "SIZE", "MODIFIED"})
|
2023-07-18 16:09:45 +00:00
|
|
|
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
|
|
|
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
|
|
|
table.SetHeaderLine(false)
|
|
|
|
table.SetBorder(false)
|
|
|
|
table.SetNoWhiteSpace(true)
|
2024-09-11 18:01:30 +00:00
|
|
|
table.SetTablePadding(" ")
|
2023-07-18 16:09:45 +00:00
|
|
|
table.AppendBulk(data)
|
|
|
|
table.Render()
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2024-05-14 00:17:36 +00:00
|
|
|
// ListRunningHandler implements "ollama ps": it prints a table of models
// currently loaded by the server — where their weights reside (CPU vs GPU)
// and when they will be unloaded — optionally filtered to names starting
// with args[0].
func ListRunningHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.ListRunning(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
			// summarize the CPU/GPU memory split for this model
			var procStr string
			switch {
			case m.SizeVRAM == 0:
				procStr = "100% CPU"
			case m.SizeVRAM == m.Size:
				procStr = "100% GPU"
			case m.SizeVRAM > m.Size || m.Size == 0:
				// inconsistent size report from the server
				procStr = "Unknown"
			default:
				sizeCPU := m.Size - m.SizeVRAM
				cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100)
				procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent))
			}

			// an ExpiresAt already in the past is shown as "Stopping..."
			var until string
			delta := time.Since(m.ExpiresAt)
			if delta > 0 {
				until = "Stopping..."
			} else {
				until = format.HumanTime(m.ExpiresAt, "Never")
			}
			data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, until})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "UNTIL"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("  ")
	table.AppendBulk(data)
	table.Render()

	return nil
}
|
|
|
|
|
2023-07-20 23:09:23 +00:00
|
|
|
func DeleteHandler(cmd *cobra.Command, args []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-20 23:09:23 +00:00
|
|
|
|
2024-10-01 22:45:43 +00:00
|
|
|
// Unload the model if it's running before deletion
|
|
|
|
opts := &runOptions{
|
|
|
|
Model: args[0],
|
|
|
|
KeepAlive: &api.Duration{Duration: 0},
|
|
|
|
}
|
|
|
|
if err := loadOrUnloadModel(cmd, opts); err != nil {
|
|
|
|
if !strings.Contains(err.Error(), "not found") {
|
|
|
|
return fmt.Errorf("unable to stop existing running model \"%s\": %s", args[0], err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-08-26 07:47:56 +00:00
|
|
|
for _, name := range args {
|
|
|
|
req := api.DeleteRequest{Name: name}
|
2023-10-12 22:56:40 +00:00
|
|
|
if err := client.Delete(cmd.Context(), &req); err != nil {
|
2023-08-26 07:47:56 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
fmt.Printf("deleted '%s'\n", name)
|
2023-07-20 23:09:23 +00:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-09-06 18:04:17 +00:00
|
|
|
func ShowHandler(cmd *cobra.Command, args []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-09-06 18:04:17 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
license, errLicense := cmd.Flags().GetBool("license")
|
|
|
|
modelfile, errModelfile := cmd.Flags().GetBool("modelfile")
|
|
|
|
parameters, errParams := cmd.Flags().GetBool("parameters")
|
|
|
|
system, errSystem := cmd.Flags().GetBool("system")
|
|
|
|
template, errTemplate := cmd.Flags().GetBool("template")
|
|
|
|
|
|
|
|
for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate} {
|
|
|
|
if boolErr != nil {
|
|
|
|
return errors.New("error retrieving flags")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
flagsSet := 0
|
|
|
|
showType := ""
|
|
|
|
|
|
|
|
if license {
|
|
|
|
flagsSet++
|
|
|
|
showType = "license"
|
|
|
|
}
|
|
|
|
|
|
|
|
if modelfile {
|
|
|
|
flagsSet++
|
|
|
|
showType = "modelfile"
|
|
|
|
}
|
|
|
|
|
|
|
|
if parameters {
|
|
|
|
flagsSet++
|
|
|
|
showType = "parameters"
|
|
|
|
}
|
|
|
|
|
|
|
|
if system {
|
|
|
|
flagsSet++
|
|
|
|
showType = "system"
|
|
|
|
}
|
|
|
|
|
|
|
|
if template {
|
|
|
|
flagsSet++
|
|
|
|
showType = "template"
|
|
|
|
}
|
|
|
|
|
|
|
|
if flagsSet > 1 {
|
2023-09-06 20:38:49 +00:00
|
|
|
return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified")
|
2024-06-19 21:19:02 +00:00
|
|
|
}
|
|
|
|
|
2024-06-28 20:15:52 +00:00
|
|
|
req := api.ShowRequest{Name: args[0]}
|
|
|
|
resp, err := client.Show(cmd.Context(), &req)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2024-06-19 21:19:02 +00:00
|
|
|
|
2024-06-28 20:15:52 +00:00
|
|
|
if flagsSet == 1 {
|
2024-06-19 21:19:02 +00:00
|
|
|
switch showType {
|
|
|
|
case "license":
|
|
|
|
fmt.Println(resp.License)
|
|
|
|
case "modelfile":
|
|
|
|
fmt.Println(resp.Modelfile)
|
|
|
|
case "parameters":
|
|
|
|
fmt.Println(resp.Parameters)
|
|
|
|
case "system":
|
2024-11-14 07:53:30 +00:00
|
|
|
fmt.Print(resp.System)
|
2024-06-19 21:19:02 +00:00
|
|
|
case "template":
|
2024-11-14 07:53:30 +00:00
|
|
|
fmt.Print(resp.Template)
|
2024-06-19 21:19:02 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2023-09-06 18:04:17 +00:00
|
|
|
}
|
|
|
|
|
2024-09-11 18:01:30 +00:00
|
|
|
return showInfo(resp, os.Stdout)
|
2024-06-28 20:15:52 +00:00
|
|
|
}
|
|
|
|
|
2024-09-11 18:01:30 +00:00
|
|
|
// showInfo writes a human-readable summary of a model to w: its
// architecture and sizes, optional projector details, runtime parameters,
// and the leading lines of its system prompt and license.
func showInfo(resp *api.ShowResponse, w io.Writer) error {
	// tableRender prints a titled, borderless table whose rows are produced
	// lazily by the supplied callback, followed by a blank separator line.
	tableRender := func(header string, rows func() [][]string) {
		fmt.Fprintln(w, " ", header)
		table := tablewriter.NewWriter(w)
		table.SetAlignment(tablewriter.ALIGN_LEFT)
		table.SetBorder(false)
		table.SetNoWhiteSpace(true)
		table.SetTablePadding("  ")

		switch header {
		case "Template", "System", "License":
			// Free-form text sections get a wider column before wrapping.
			table.SetColWidth(100)
		}

		table.AppendBulk(rows())
		table.Render()
		fmt.Fprintln(w)
	}

	tableRender("Model", func() (rows [][]string) {
		if resp.ModelInfo != nil {
			// Detailed metadata is available; keys are namespaced by the
			// model architecture (e.g. "llama.context_length"). Numeric
			// values arrive as float64 from JSON decoding.
			arch := resp.ModelInfo["general.architecture"].(string)
			rows = append(rows, []string{"", "architecture", arch})
			rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ModelInfo["general.parameter_count"].(float64)))})
			rows = append(rows, []string{"", "context length", strconv.FormatFloat(resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)].(float64), 'f', -1, 64)})
			rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)].(float64), 'f', -1, 64)})
		} else {
			// Fall back to the coarser Details block.
			rows = append(rows, []string{"", "architecture", resp.Details.Family})
			rows = append(rows, []string{"", "parameters", resp.Details.ParameterSize})
		}
		rows = append(rows, []string{"", "quantization", resp.Details.QuantizationLevel})
		return
	})

	if resp.ProjectorInfo != nil {
		tableRender("Projector", func() (rows [][]string) {
			arch := resp.ProjectorInfo["general.architecture"].(string)
			rows = append(rows, []string{"", "architecture", arch})
			rows = append(rows, []string{"", "parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))})
			rows = append(rows, []string{"", "embedding length", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.embedding_length", arch)].(float64), 'f', -1, 64)})
			rows = append(rows, []string{"", "dimensions", strconv.FormatFloat(resp.ProjectorInfo[fmt.Sprintf("%s.vision.projection_dim", arch)].(float64), 'f', -1, 64)})
			return
		})
	}

	if resp.Parameters != "" {
		tableRender("Parameters", func() (rows [][]string) {
			// Each non-empty line of the parameter dump becomes a row,
			// split on whitespace into name/value columns.
			scanner := bufio.NewScanner(strings.NewReader(resp.Parameters))
			for scanner.Scan() {
				if text := scanner.Text(); text != "" {
					rows = append(rows, append([]string{""}, strings.Fields(text)...))
				}
			}
			return
		})
	}

	// head collects up to n non-empty, trimmed lines of s as table rows
	// (all lines when n < 0).
	head := func(s string, n int) (rows [][]string) {
		scanner := bufio.NewScanner(strings.NewReader(s))
		for scanner.Scan() && (len(rows) < n || n < 0) {
			if text := scanner.Text(); text != "" {
				rows = append(rows, []string{"", strings.TrimSpace(text)})
			}
		}
		return
	}

	if resp.System != "" {
		tableRender("System", func() [][]string {
			return head(resp.System, 2)
		})
	}

	if resp.License != "" {
		tableRender("License", func() [][]string {
			return head(resp.License, 2)
		})
	}

	return nil
}
|
|
|
|
|
2023-07-24 15:27:28 +00:00
|
|
|
func CopyHandler(cmd *cobra.Command, args []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-24 15:27:28 +00:00
|
|
|
|
|
|
|
req := api.CopyRequest{Source: args[0], Destination: args[1]}
|
2023-10-12 22:56:40 +00:00
|
|
|
if err := client.Copy(cmd.Context(), &req); err != nil {
|
2023-07-24 15:27:28 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
fmt.Printf("copied '%s' to '%s'\n", args[0], args[1])
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-07-20 23:09:23 +00:00
|
|
|
// PullHandler implements "ollama pull". It downloads a model from a
// registry, rendering a progress bar per layer and a spinner for
// status-only updates.
func PullHandler(cmd *cobra.Command, args []string) error {
	insecure, err := cmd.Flags().GetBool("insecure")
	if err != nil {
		return err
	}

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	// One progress bar per layer, keyed by digest.
	bars := make(map[string]*progress.Bar)

	var status string
	var spinner *progress.Spinner

	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			// Layer download progress: stop any status spinner and
			// create or update the bar for this digest.
			if spinner != nil {
				spinner.Stop()
			}

			bar, ok := bars[resp.Digest]
			if !ok {
				// Digest is "sha256:<hex>"; show a short 12-char prefix.
				bar = progress.NewBar(fmt.Sprintf("pulling %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			// New status message: replace the previous spinner.
			if spinner != nil {
				spinner.Stop()
			}

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	request := api.PullRequest{Name: args[0], Insecure: insecure}
	if err := client.Pull(cmd.Context(), &request, fn); err != nil {
		return err
	}

	return nil
}
|
|
|
|
|
2023-12-05 00:35:29 +00:00
|
|
|
// generateContextKey is a dedicated context.Context key type used to carry
// the generate token context between successive calls in a session.
type generateContextKey string
|
|
|
|
|
2024-01-12 20:05:52 +00:00
|
|
|
// runOptions carries the settings for a single "ollama run" session,
// shared by the chat and generate code paths.
type runOptions struct {
	Model       string        // name of the model to run
	ParentModel string        // NOTE(review): presumably the originating model name; confirm against callers
	Prompt      string        // prompt text for a single generate call
	Messages    []api.Message // accumulated chat history
	WordWrap    bool          // wrap output at word boundaries to the terminal width
	Format      string        // response format (e.g. "json")
	System      string        // system prompt override
	Images      []api.ImageData        // image attachments for multimodal models
	Options     map[string]interface{} // model options such as temperature
	MultiModal  bool                   // when true, file references in the prompt are extracted as images
	KeepAlive   *api.Duration          // how long the server keeps the model loaded; nil leaves the server default
}
|
|
|
|
|
2024-01-12 20:05:52 +00:00
|
|
|
// displayResponseState tracks word-wrap bookkeeping across successive
// chunks of streamed model output.
type displayResponseState struct {
	lineLength int    // visible width of the current output line so far
	wordBuffer string // current partial word, held back until a break point
}
|
|
|
|
|
|
|
|
// displayResponse prints a chunk of streamed model output to stdout. When
// wordWrap is enabled and the terminal is at least 10 columns wide, it
// buffers the current word and breaks lines at word boundaries instead of
// mid-word, using ANSI escapes to rewrite the partially printed word on the
// next line. state persists wrap bookkeeping between chunks.
func displayResponse(content string, wordWrap bool, state *displayResponseState) {
	termWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
	if wordWrap && termWidth >= 10 {
		for _, ch := range content {
			// Would this rune push past the wrap margin (5 cells from the edge)?
			if state.lineLength+1 > termWidth-5 {
				if runewidth.StringWidth(state.wordBuffer) > termWidth-10 {
					// The pending word is too wide to move to a new line
					// sensibly; flush it in place and reset.
					fmt.Printf("%s%c", state.wordBuffer, ch)
					state.wordBuffer = ""
					state.lineLength = 0
					continue
				}

				// backtrack the length of the last word and clear to the end of the line
				a := runewidth.StringWidth(state.wordBuffer)
				if a > 0 {
					fmt.Printf("\x1b[%dD", a)
				}
				fmt.Printf("\x1b[K\n")
				fmt.Printf("%s%c", state.wordBuffer, ch)
				chWidth := runewidth.RuneWidth(ch)

				state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth
			} else {
				fmt.Print(string(ch))
				state.lineLength += runewidth.RuneWidth(ch)
				// Wide runes (display width >= 2, e.g. CJK) act as their
				// own break point, so the word buffer resets.
				if runewidth.RuneWidth(ch) >= 2 {
					state.wordBuffer = ""
					continue
				}

				switch ch {
				case ' ':
					state.wordBuffer = ""
				case '\n':
					state.lineLength = 0
				default:
					state.wordBuffer += string(ch)
				}
			}
		}
	} else {
		// No wrapping: flush any buffered word followed by the new content.
		fmt.Printf("%s%s", state.wordBuffer, content)
		if len(state.wordBuffer) > 0 {
			state.wordBuffer = ""
		}
	}
}
|
|
|
|
|
|
|
|
// chat streams a chat completion for opts.Messages, printing the response
// as it arrives. It returns the assistant's complete message, or (nil, nil)
// when the request was interrupted with SIGINT.
func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return nil, err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	// Show a spinner until the first token arrives.
	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	cancelCtx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	// Ctrl-C cancels the in-flight request instead of exiting the process.
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}
	var latest api.ChatResponse
	var fullResponse strings.Builder
	var role string

	fn := func(response api.ChatResponse) error {
		// Clear the spinner before printing streamed output.
		p.StopAndClear()

		latest = response

		role = response.Message.Role
		content := response.Message.Content
		fullResponse.WriteString(content)

		displayResponse(content, opts.WordWrap, state)

		return nil
	}

	req := &api.ChatRequest{
		Model:    opts.Model,
		Messages: opts.Messages,
		Format:   opts.Format,
		Options:  opts.Options,
	}

	// Only override the server's keep-alive when explicitly requested.
	if opts.KeepAlive != nil {
		req.KeepAlive = opts.KeepAlive
	}

	if err := client.Chat(cancelCtx, req, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			// Interrupted by the user; not an error.
			return nil, nil
		}
		return nil, err
	}

	if len(opts.Messages) > 0 {
		fmt.Println()
		fmt.Println()
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return nil, err
	}

	if verbose {
		// Print timing/token statistics for the final response.
		latest.Summary()
	}

	return &api.Message{Role: role, Content: fullResponse.String()}, nil
}
|
|
|
|
|
|
|
|
// generate streams a completion for opts.Prompt, printing the response as
// it arrives, and stashes the returned token context on the command's
// context so the next generate call in the session can continue from it.
func generate(cmd *cobra.Command, opts runOptions) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	// Show a spinner until the first token arrives.
	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	var latest api.GenerateResponse

	// Resume from the token context of a previous call, if one was stored.
	generateContext, ok := cmd.Context().Value(generateContextKey("context")).([]int)
	if !ok {
		generateContext = []int{}
	}

	ctx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	// Ctrl-C cancels the in-flight request instead of exiting the process.
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}

	fn := func(response api.GenerateResponse) error {
		// Clear the spinner before printing streamed output.
		p.StopAndClear()

		latest = response
		content := response.Response

		displayResponse(content, opts.WordWrap, state)

		return nil
	}

	if opts.MultiModal {
		// Pull file references out of the prompt and attach them as images.
		opts.Prompt, opts.Images, err = extractFileData(opts.Prompt)
		if err != nil {
			return err
		}
	}

	request := api.GenerateRequest{
		Model:     opts.Model,
		Prompt:    opts.Prompt,
		Context:   generateContext,
		Images:    opts.Images,
		Format:    opts.Format,
		System:    opts.System,
		Options:   opts.Options,
		KeepAlive: opts.KeepAlive,
	}

	if err := client.Generate(ctx, &request, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			// Interrupted by the user; not an error.
			return nil
		}
		return err
	}

	if opts.Prompt != "" {
		fmt.Println()
		fmt.Println()
	}

	// An interrupted stream never finishes, so there are no final stats or
	// token context to record.
	if !latest.Done {
		return nil
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return err
	}

	if verbose {
		// Print timing/token statistics for the final response.
		latest.Summary()
	}

	ctx = context.WithValue(cmd.Context(), generateContextKey("context"), latest.Context)
	cmd.SetContext(ctx)

	return nil
}
|
|
|
|
|
2024-08-14 00:54:19 +00:00
|
|
|
func RunServer(_ *cobra.Command, _ []string) error {
|
2023-09-21 00:49:48 +00:00
|
|
|
if err := initializeKeypair(); err != nil {
|
2023-08-11 17:58:23 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-07-03 23:44:57 +00:00
|
|
|
ln, err := net.Listen("tcp", envconfig.Host().Host)
|
2023-08-07 03:34:37 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-07-04 04:47:00 +00:00
|
|
|
|
2024-05-06 23:01:37 +00:00
|
|
|
err = server.Serve(ln)
|
|
|
|
if errors.Is(err, http.ErrServerClosed) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return err
|
2023-07-04 04:47:00 +00:00
|
|
|
}
|
|
|
|
|
2023-08-11 17:58:23 +00:00
|
|
|
func initializeKeypair() error {
|
|
|
|
home, err := os.UserHomeDir()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
privKeyPath := filepath.Join(home, ".ollama", "id_ed25519")
|
|
|
|
pubKeyPath := filepath.Join(home, ".ollama", "id_ed25519.pub")
|
|
|
|
|
|
|
|
_, err = os.Stat(privKeyPath)
|
|
|
|
if os.IsNotExist(err) {
|
|
|
|
fmt.Printf("Couldn't find '%s'. Generating new private key.\n", privKeyPath)
|
2024-02-24 00:50:41 +00:00
|
|
|
cryptoPublicKey, cryptoPrivateKey, err := ed25519.GenerateKey(rand.Reader)
|
2023-08-11 17:58:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
privateKeyBytes, err := ssh.MarshalPrivateKey(cryptoPrivateKey, "")
|
2023-08-11 17:58:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
if err := os.MkdirAll(filepath.Dir(privKeyPath), 0o755); err != nil {
|
2023-08-11 22:35:55 +00:00
|
|
|
return fmt.Errorf("could not create directory %w", err)
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
if err := os.WriteFile(privKeyPath, pem.EncodeToMemory(privateKeyBytes), 0o600); err != nil {
|
2023-08-11 17:58:23 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
sshPublicKey, err := ssh.NewPublicKey(cryptoPublicKey)
|
2023-08-11 17:58:23 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
publicKeyBytes := ssh.MarshalAuthorizedKey(sshPublicKey)
|
2023-08-11 17:58:23 +00:00
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
if err := os.WriteFile(pubKeyPath, publicKeyBytes, 0o644); err != nil {
|
2023-08-11 17:58:23 +00:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2024-02-24 00:50:41 +00:00
|
|
|
fmt.Printf("Your new public key is: \n\n%s\n", publicKeyBytes)
|
2023-08-11 17:58:23 +00:00
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-10-12 22:56:40 +00:00
|
|
|
func checkServerHeartbeat(cmd *cobra.Command, _ []string) error {
|
2023-10-09 19:18:26 +00:00
|
|
|
client, err := api.ClientFromEnvironment()
|
2023-08-16 15:03:48 +00:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2023-10-12 22:56:40 +00:00
|
|
|
if err := client.Heartbeat(cmd.Context()); err != nil {
|
2023-12-27 00:03:45 +00:00
|
|
|
if !strings.Contains(err.Error(), " refused") {
|
2023-07-31 21:38:10 +00:00
|
|
|
return err
|
|
|
|
}
|
2023-12-27 00:03:45 +00:00
|
|
|
if err := startApp(cmd.Context(), client); err != nil {
|
2024-08-01 21:52:15 +00:00
|
|
|
return errors.New("could not connect to ollama app, is it running?")
|
2023-07-31 20:25:57 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2023-11-22 17:41:02 +00:00
|
|
|
func versionHandler(cmd *cobra.Command, _ []string) {
|
|
|
|
client, err := api.ClientFromEnvironment()
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
serverVersion, err := client.Version(cmd.Context())
|
|
|
|
if err != nil {
|
2023-12-01 20:10:27 +00:00
|
|
|
fmt.Println("Warning: could not connect to a running Ollama instance")
|
|
|
|
}
|
|
|
|
|
|
|
|
if serverVersion != "" {
|
|
|
|
fmt.Printf("ollama version is %s\n", serverVersion)
|
2023-11-22 17:41:02 +00:00
|
|
|
}
|
|
|
|
|
2023-10-16 16:57:19 +00:00
|
|
|
if serverVersion != version.Version {
|
2023-12-01 20:10:27 +00:00
|
|
|
fmt.Printf("Warning: client version is %s\n", version.Version)
|
2023-10-16 16:57:19 +00:00
|
|
|
}
|
2023-11-22 17:41:02 +00:00
|
|
|
}
|
|
|
|
|
2024-05-24 21:57:15 +00:00
|
|
|
func appendEnvDocs(cmd *cobra.Command, envs []envconfig.EnvVar) {
|
2024-05-18 18:51:57 +00:00
|
|
|
if len(envs) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
envUsage := `
|
2024-03-07 21:57:07 +00:00
|
|
|
Environment Variables:
|
|
|
|
`
|
2024-05-18 18:51:57 +00:00
|
|
|
for _, e := range envs {
|
2024-05-24 21:57:15 +00:00
|
|
|
envUsage += fmt.Sprintf(" %-24s %s\n", e.Name, e.Description)
|
2024-05-18 18:51:57 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
cmd.SetUsageTemplate(cmd.UsageTemplate() + envUsage)
|
2024-03-07 21:57:07 +00:00
|
|
|
}
|
|
|
|
|
2023-07-03 19:22:44 +00:00
|
|
|
func NewCLI() *cobra.Command {
|
|
|
|
log.SetFlags(log.LstdFlags | log.Lshortfile)
|
2023-11-22 17:41:02 +00:00
|
|
|
cobra.EnableCommandSorting = false
|
2023-07-03 19:22:44 +00:00
|
|
|
|
2024-10-25 20:43:16 +00:00
|
|
|
if runtime.GOOS == "windows" && term.IsTerminal(int(os.Stdout.Fd())) {
|
2024-03-11 22:21:57 +00:00
|
|
|
console.ConsoleFromFile(os.Stdin) //nolint:errcheck
|
2023-11-24 06:21:32 +00:00
|
|
|
}
|
|
|
|
|
2023-07-03 19:22:44 +00:00
|
|
|
rootCmd := &cobra.Command{
|
2023-08-14 18:15:53 +00:00
|
|
|
Use: "ollama",
|
|
|
|
Short: "Large language model runner",
|
|
|
|
SilenceUsage: true,
|
|
|
|
SilenceErrors: true,
|
2023-07-03 19:22:44 +00:00
|
|
|
CompletionOptions: cobra.CompletionOptions{
|
|
|
|
DisableDefaultCmd: true,
|
|
|
|
},
|
2023-11-22 17:41:02 +00:00
|
|
|
Run: func(cmd *cobra.Command, args []string) {
|
|
|
|
if version, _ := cmd.Flags().GetBool("version"); version {
|
|
|
|
versionHandler(cmd, args)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
cmd.Print(cmd.UsageString())
|
|
|
|
},
|
2023-07-03 19:22:44 +00:00
|
|
|
}
|
|
|
|
|
2023-11-22 17:41:02 +00:00
|
|
|
rootCmd.Flags().BoolP("version", "v", false, "Show version information")
|
2023-07-03 19:22:44 +00:00
|
|
|
|
2023-07-17 00:02:22 +00:00
|
|
|
createCmd := &cobra.Command{
|
2023-07-31 20:25:57 +00:00
|
|
|
Use: "create MODEL",
|
|
|
|
Short: "Create a model from a Modelfile",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(1),
|
2023-07-31 20:25:57 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: CreateHandler,
|
2023-07-17 00:02:22 +00:00
|
|
|
}
|
|
|
|
|
2024-10-22 20:32:24 +00:00
|
|
|
createCmd.Flags().StringP("file", "f", "", "Name of the Modelfile (default \"Modelfile\"")
|
2024-05-10 20:06:13 +00:00
|
|
|
createCmd.Flags().StringP("quantize", "q", "", "Quantize model to this level (e.g. q4_0)")
|
2023-07-17 00:02:22 +00:00
|
|
|
|
2023-09-06 18:04:17 +00:00
|
|
|
showCmd := &cobra.Command{
|
|
|
|
Use: "show MODEL",
|
|
|
|
Short: "Show information for a model",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(1),
|
2023-09-06 18:04:17 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: ShowHandler,
|
|
|
|
}
|
|
|
|
|
|
|
|
showCmd.Flags().Bool("license", false, "Show license of a model")
|
|
|
|
showCmd.Flags().Bool("modelfile", false, "Show Modelfile of a model")
|
|
|
|
showCmd.Flags().Bool("parameters", false, "Show parameters of a model")
|
|
|
|
showCmd.Flags().Bool("template", false, "Show template of a model")
|
2023-12-12 19:43:19 +00:00
|
|
|
showCmd.Flags().Bool("system", false, "Show system message of a model")
|
2023-09-06 18:04:17 +00:00
|
|
|
|
2023-07-03 19:22:44 +00:00
|
|
|
runCmd := &cobra.Command{
|
2024-04-15 23:58:00 +00:00
|
|
|
Use: "run MODEL [PROMPT]",
|
|
|
|
Short: "Run a model",
|
|
|
|
Args: cobra.MinimumNArgs(1),
|
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: RunHandler,
|
2023-07-03 19:22:44 +00:00
|
|
|
}
|
|
|
|
|
2024-05-14 00:17:36 +00:00
|
|
|
runCmd.Flags().String("keepalive", "", "Duration to keep a model loaded (e.g. 5m)")
|
2023-07-13 01:18:06 +00:00
|
|
|
runCmd.Flags().Bool("verbose", false, "Show timings for response")
|
2023-08-22 04:56:56 +00:00
|
|
|
runCmd.Flags().Bool("insecure", false, "Use an insecure registry")
|
2023-09-22 20:36:08 +00:00
|
|
|
runCmd.Flags().Bool("nowordwrap", false, "Don't wrap words to the next line automatically")
|
2023-11-14 02:54:02 +00:00
|
|
|
runCmd.Flags().String("format", "", "Response format (e.g. json)")
|
2024-09-11 23:36:21 +00:00
|
|
|
|
|
|
|
stopCmd := &cobra.Command{
|
|
|
|
Use: "stop MODEL",
|
|
|
|
Short: "Stop a running model",
|
|
|
|
Args: cobra.ExactArgs(1),
|
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: StopHandler,
|
|
|
|
}
|
|
|
|
|
2023-07-03 19:22:44 +00:00
|
|
|
serveCmd := &cobra.Command{
|
|
|
|
Use: "serve",
|
|
|
|
Aliases: []string{"start"},
|
|
|
|
Short: "Start ollama",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(0),
|
2023-07-06 20:49:31 +00:00
|
|
|
RunE: RunServer,
|
2023-07-03 19:22:44 +00:00
|
|
|
}
|
|
|
|
|
2023-07-17 00:02:22 +00:00
|
|
|
pullCmd := &cobra.Command{
|
2023-07-31 20:25:57 +00:00
|
|
|
Use: "pull MODEL",
|
|
|
|
Short: "Pull a model from a registry",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(1),
|
2023-07-31 20:25:57 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: PullHandler,
|
2023-07-17 00:02:22 +00:00
|
|
|
}
|
|
|
|
|
2023-07-21 22:42:19 +00:00
|
|
|
pullCmd.Flags().Bool("insecure", false, "Use an insecure registry")
|
|
|
|
|
2023-07-17 00:02:22 +00:00
|
|
|
pushCmd := &cobra.Command{
|
2023-07-31 20:25:57 +00:00
|
|
|
Use: "push MODEL",
|
|
|
|
Short: "Push a model to a registry",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(1),
|
2023-07-31 20:25:57 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: PushHandler,
|
2023-07-17 00:02:22 +00:00
|
|
|
}
|
|
|
|
|
2023-07-21 22:42:19 +00:00
|
|
|
pushCmd.Flags().Bool("insecure", false, "Use an insecure registry")
|
|
|
|
|
2023-07-18 16:09:45 +00:00
|
|
|
listCmd := &cobra.Command{
|
2023-07-21 22:42:19 +00:00
|
|
|
Use: "list",
|
2023-07-20 22:28:27 +00:00
|
|
|
Aliases: []string{"ls"},
|
2023-07-21 22:42:19 +00:00
|
|
|
Short: "List models",
|
2023-07-31 20:25:57 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
2023-07-21 22:42:19 +00:00
|
|
|
RunE: ListHandler,
|
2023-07-20 23:09:23 +00:00
|
|
|
}
|
2024-05-14 00:17:36 +00:00
|
|
|
|
|
|
|
psCmd := &cobra.Command{
|
|
|
|
Use: "ps",
|
|
|
|
Short: "List running models",
|
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: ListRunningHandler,
|
|
|
|
}
|
|
|
|
|
2023-07-24 15:27:28 +00:00
|
|
|
copyCmd := &cobra.Command{
|
2024-05-01 19:39:05 +00:00
|
|
|
Use: "cp SOURCE DESTINATION",
|
2023-07-31 20:25:57 +00:00
|
|
|
Short: "Copy a model",
|
2023-10-18 18:57:22 +00:00
|
|
|
Args: cobra.ExactArgs(2),
|
2023-07-31 20:25:57 +00:00
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: CopyHandler,
|
2023-07-24 15:27:28 +00:00
|
|
|
}
|
|
|
|
|
2023-07-20 23:09:23 +00:00
|
|
|
deleteCmd := &cobra.Command{
|
2023-10-18 18:57:22 +00:00
|
|
|
Use: "rm MODEL [MODEL...]",
|
2023-07-31 20:25:57 +00:00
|
|
|
Short: "Remove a model",
|
|
|
|
Args: cobra.MinimumNArgs(1),
|
|
|
|
PreRunE: checkServerHeartbeat,
|
|
|
|
RunE: DeleteHandler,
|
2023-07-18 16:09:45 +00:00
|
|
|
}
|
|
|
|
|
2024-05-24 21:57:15 +00:00
|
|
|
envVars := envconfig.AsMap()
|
|
|
|
|
|
|
|
envs := []envconfig.EnvVar{envVars["OLLAMA_HOST"]}
|
2024-05-18 18:51:57 +00:00
|
|
|
|
2024-03-07 21:57:07 +00:00
|
|
|
for _, cmd := range []*cobra.Command{
|
|
|
|
createCmd,
|
|
|
|
showCmd,
|
|
|
|
runCmd,
|
2024-09-11 23:36:21 +00:00
|
|
|
stopCmd,
|
2024-03-07 21:57:07 +00:00
|
|
|
pullCmd,
|
|
|
|
pushCmd,
|
|
|
|
listCmd,
|
2024-05-14 00:17:36 +00:00
|
|
|
psCmd,
|
2024-03-07 21:57:07 +00:00
|
|
|
copyCmd,
|
|
|
|
deleteCmd,
|
2024-05-24 21:57:15 +00:00
|
|
|
serveCmd,
|
2024-03-07 21:57:07 +00:00
|
|
|
} {
|
2024-05-18 18:51:57 +00:00
|
|
|
switch cmd {
|
|
|
|
case runCmd:
|
2024-05-24 21:57:15 +00:00
|
|
|
appendEnvDocs(cmd, []envconfig.EnvVar{envVars["OLLAMA_HOST"], envVars["OLLAMA_NOHISTORY"]})
|
|
|
|
case serveCmd:
|
|
|
|
appendEnvDocs(cmd, []envconfig.EnvVar{
|
|
|
|
envVars["OLLAMA_DEBUG"],
|
|
|
|
envVars["OLLAMA_HOST"],
|
|
|
|
envVars["OLLAMA_KEEP_ALIVE"],
|
|
|
|
envVars["OLLAMA_MAX_LOADED_MODELS"],
|
|
|
|
envVars["OLLAMA_MAX_QUEUE"],
|
|
|
|
envVars["OLLAMA_MODELS"],
|
|
|
|
envVars["OLLAMA_NUM_PARALLEL"],
|
|
|
|
envVars["OLLAMA_NOPRUNE"],
|
|
|
|
envVars["OLLAMA_ORIGINS"],
|
2024-07-23 22:14:28 +00:00
|
|
|
envVars["OLLAMA_SCHED_SPREAD"],
|
2024-05-24 21:57:15 +00:00
|
|
|
envVars["OLLAMA_TMPDIR"],
|
2024-05-30 16:36:51 +00:00
|
|
|
envVars["OLLAMA_FLASH_ATTENTION"],
|
|
|
|
envVars["OLLAMA_LLM_LIBRARY"],
|
2024-09-05 20:46:35 +00:00
|
|
|
envVars["OLLAMA_GPU_OVERHEAD"],
|
2024-09-05 21:00:08 +00:00
|
|
|
envVars["OLLAMA_LOAD_TIMEOUT"],
|
2024-05-24 21:57:15 +00:00
|
|
|
})
|
2024-05-18 18:51:57 +00:00
|
|
|
default:
|
|
|
|
appendEnvDocs(cmd, envs)
|
|
|
|
}
|
2024-03-07 21:57:07 +00:00
|
|
|
}
|
|
|
|
|
2023-07-03 19:22:44 +00:00
|
|
|
rootCmd.AddCommand(
|
|
|
|
serveCmd,
|
2023-07-17 00:02:22 +00:00
|
|
|
createCmd,
|
2023-09-06 18:04:17 +00:00
|
|
|
showCmd,
|
2023-07-03 21:14:20 +00:00
|
|
|
runCmd,
|
2024-09-11 23:36:21 +00:00
|
|
|
stopCmd,
|
2023-07-17 00:02:22 +00:00
|
|
|
pullCmd,
|
|
|
|
pushCmd,
|
2023-07-18 16:09:45 +00:00
|
|
|
listCmd,
|
2024-05-14 00:17:36 +00:00
|
|
|
psCmd,
|
2023-07-24 15:27:28 +00:00
|
|
|
copyCmd,
|
2023-07-20 23:09:23 +00:00
|
|
|
deleteCmd,
|
2023-07-03 19:22:44 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
return rootCmd
|
|
|
|
}
|