package cmd

import (
	"archive/zip"
	"bytes"
	"context"
	"crypto/ed25519"
	"crypto/rand"
	"crypto/sha256"
	"encoding/pem"
	"errors"
	"fmt"
	"io"
	"log"
	"math"
	"net"
	"net/http"
	"os"
	"os/signal"
	"path/filepath"
	"regexp"
	"runtime"
	"slices"
	"strings"
	"syscall"
	"time"

	"github.com/containerd/console"
	"github.com/mattn/go-runewidth"
	"github.com/olekukonko/tablewriter"
	"github.com/spf13/cobra"
	"golang.org/x/crypto/ssh"
	"golang.org/x/term"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/auth"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/format"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/progress"
	"github.com/ollama/ollama/server"
	"github.com/ollama/ollama/types/errtypes"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/version"
)

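// CreateHandler implements `ollama create`: it parses the Modelfile, uploads any
// referenced model or adapter weights as blobs, and asks the server to create the model.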
func CreateHandler(cmd *cobra.Command, args []string) error {
	filename, _ := cmd.Flags().GetString("file")
	filename, err := filepath.Abs(filename)
	if err != nil {
		return err
	}

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	f, err := os.Open(filename)
	if err != nil {
		return err
	}
	defer f.Close()

	modelfile, err := parser.ParseFile(f)
	if err != nil {
		return err
	}

	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}

	status := "transferring model data"
	spinner := progress.NewSpinner(status)
	p.Add(status, spinner)

	for i := range modelfile.Commands {
		switch modelfile.Commands[i].Name {
		case "model", "adapter":
			path := modelfile.Commands[i].Args
			if path == "~" {
				path = home
			} else if strings.HasPrefix(path, "~/") {
				path = filepath.Join(home, path[2:])
			}

			if !filepath.IsAbs(path) {
				path = filepath.Join(filepath.Dir(filename), path)
			}

			fi, err := os.Stat(path)
			if errors.Is(err, os.ErrNotExist) && modelfile.Commands[i].Name == "model" {
				continue
			} else if err != nil {
				return err
			}

			if fi.IsDir() {
				// this is likely a safetensors or pytorch directory
				// TODO make this work w/ adapters
				tempfile, err := tempZipFiles(path)
				if err != nil {
					return err
				}
				defer os.RemoveAll(tempfile)

				path = tempfile
			}

			digest, err := createBlob(cmd, client, path)
			if err != nil {
				return err
			}

			modelfile.Commands[i].Args = "@" + digest
		}
	}

	bars := make(map[string]*progress.Bar)
	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			spinner.Stop()

			bar, ok := bars[resp.Digest]
			if !ok {
				bar = progress.NewBar(fmt.Sprintf("pulling %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			spinner.Stop()

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	quantize, _ := cmd.Flags().GetString("quantize")

	request := api.CreateRequest{Name: args[0], Modelfile: modelfile.String(), Quantize: quantize}
	if err := client.Create(cmd.Context(), &request, fn); err != nil {
		return err
	}

	return nil
}

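// tempZipFiles bundles the safetensors or PyTorch weights, configuration JSON, and
// tokenizer files found in path into a temporary zip archive and returns its name.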
func tempZipFiles(path string) (string, error) {
	tempfile, err := os.CreateTemp("", "ollama-tf")
	if err != nil {
		return "", err
	}
	defer tempfile.Close()

	detectContentType := func(path string) (string, error) {
		f, err := os.Open(path)
		if err != nil {
			return "", err
		}
		defer f.Close()

		var b bytes.Buffer
		b.Grow(512)

		if _, err := io.CopyN(&b, f, 512); err != nil && !errors.Is(err, io.EOF) {
			return "", err
		}

		contentType, _, _ := strings.Cut(http.DetectContentType(b.Bytes()), ";")
		return contentType, nil
	}

	glob := func(pattern, contentType string) ([]string, error) {
		matches, err := filepath.Glob(pattern)
		if err != nil {
			return nil, err
		}

		for _, safetensor := range matches {
			if ct, err := detectContentType(safetensor); err != nil {
				return nil, err
			} else if ct != contentType {
				return nil, fmt.Errorf("invalid content type: expected %s for %s, got %s", contentType, safetensor, ct)
			}
		}

		return matches, nil
	}

	var files []string
	if st, _ := glob(filepath.Join(path, "model*.safetensors"), "application/octet-stream"); len(st) > 0 {
		// safetensors files might be unresolved git lfs references; skip if they are
		// covers model-x-of-y.safetensors, model.fp32-x-of-y.safetensors, model.safetensors
		files = append(files, st...)
	} else if pt, _ := glob(filepath.Join(path, "pytorch_model*.bin"), "application/zip"); len(pt) > 0 {
		// pytorch files might also be unresolved git lfs references; skip if they are
		// covers pytorch_model-x-of-y.bin, pytorch_model.fp32-x-of-y.bin, pytorch_model.bin
		files = append(files, pt...)
	} else if pt, _ := glob(filepath.Join(path, "consolidated*.pth"), "application/zip"); len(pt) > 0 {
		// pytorch files might also be unresolved git lfs references; skip if they are
		// covers consolidated.x.pth, consolidated.pth
		files = append(files, pt...)
	} else {
		return "", errors.New("no safetensors or torch files found")
	}

	// add configuration files, json files are detected as text/plain
	js, err := glob(filepath.Join(path, "*.json"), "text/plain")
	if err != nil {
		return "", err
	}
	files = append(files, js...)

	if tks, _ := glob(filepath.Join(path, "tokenizer.model"), "application/octet-stream"); len(tks) > 0 {
		// add tokenizer.model if it exists, tokenizer.json is automatically picked up by the previous glob
		// tokenizer.model might be an unresolved git lfs reference; error if it is
		files = append(files, tks...)
	} else if tks, _ := glob(filepath.Join(path, "**/tokenizer.model"), "text/plain"); len(tks) > 0 {
		// sometimes tokenizer.model is in a subdirectory (e.g. meta-llama/Meta-Llama-3-8B)
		files = append(files, tks...)
	}

	zipfile := zip.NewWriter(tempfile)
	defer zipfile.Close()

	for _, file := range files {
		f, err := os.Open(file)
		if err != nil {
			return "", err
		}
		defer f.Close()

		fi, err := f.Stat()
		if err != nil {
			return "", err
		}

		zfi, err := zip.FileInfoHeader(fi)
		if err != nil {
			return "", err
		}

		zf, err := zipfile.CreateHeader(zfi)
		if err != nil {
			return "", err
		}

		if _, err := io.Copy(zf, f); err != nil {
			return "", err
		}
	}

	return tempfile.Name(), nil
}

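// createBlob hashes the file at path with SHA-256 and uploads it to the server,
// returning the resulting blob digest.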
func createBlob(cmd *cobra.Command, client *api.Client, path string) (string, error) {
	bin, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer bin.Close()

	hash := sha256.New()
	if _, err := io.Copy(hash, bin); err != nil {
		return "", err
	}

	if _, err := bin.Seek(0, io.SeekStart); err != nil {
		return "", err
	}

	digest := fmt.Sprintf("sha256:%x", hash.Sum(nil))
	if err = client.CreateBlob(cmd.Context(), digest, bin); err != nil {
		return "", err
	}
	return digest, nil
}

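// RunHandler implements `ollama run`: it builds run options from flags and stdin,
// pulls the model if it is not available locally, and then starts either an
// interactive session or a single non-interactive generation.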
func RunHandler(cmd *cobra.Command, args []string) error {
	interactive := true

	opts := runOptions{
		Model:    args[0],
		WordWrap: os.Getenv("TERM") == "xterm-256color",
		Options:  map[string]interface{}{},
	}

	format, err := cmd.Flags().GetString("format")
	if err != nil {
		return err
	}
	opts.Format = format

	keepAlive, err := cmd.Flags().GetString("keepalive")
	if err != nil {
		return err
	}
	if keepAlive != "" {
		d, err := time.ParseDuration(keepAlive)
		if err != nil {
			return err
		}
		opts.KeepAlive = &api.Duration{Duration: d}
	}

	prompts := args[1:]
	// prepend stdin to the prompt if provided
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			return err
		}

		prompts = append([]string{string(in)}, prompts...)
		opts.WordWrap = false
		interactive = false
	}
	opts.Prompt = strings.Join(prompts, " ")
	if len(prompts) > 0 {
		interactive = false
	}

	nowrap, err := cmd.Flags().GetBool("nowordwrap")
	if err != nil {
		return err
	}
	opts.WordWrap = !nowrap

	// Fill out the rest of the options based on information about the
	// model.
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	name := args[0]
	info, err := func() (*api.ShowResponse, error) {
		showReq := &api.ShowRequest{Name: name}
		info, err := client.Show(cmd.Context(), showReq)
		var se api.StatusError
		if errors.As(err, &se) && se.StatusCode == http.StatusNotFound {
			if err := PullHandler(cmd, []string{name}); err != nil {
				return nil, err
			}
			return client.Show(cmd.Context(), &api.ShowRequest{Name: name})
		}
		return info, err
	}()
	if err != nil {
		return err
	}

	opts.MultiModal = slices.Contains(info.Details.Families, "clip")
	opts.ParentModel = info.Details.ParentModel
	opts.Messages = append(opts.Messages, info.Messages...)

	if interactive {
		return generateInteractive(cmd, opts)
	}
	return generate(cmd, opts)
}

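// errFromUnknownKey augments an unknown-key error from the registry with the local
// public key and a link to the key settings page, but only when the key quoted in the
// error matches the local key; otherwise the original error is returned unchanged.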
func errFromUnknownKey(unknownKeyErr error) error {
	// find SSH public key in the error message
	sshKeyPattern := `ssh-\w+ [^\s"]+`
	re := regexp.MustCompile(sshKeyPattern)
	matches := re.FindStringSubmatch(unknownKeyErr.Error())

	if len(matches) > 0 {
		serverPubKey := matches[0]

		localPubKey, err := auth.GetPublicKey()
		if err != nil {
			return unknownKeyErr
		}

		if runtime.GOOS == "linux" && serverPubKey != localPubKey {
			// try the ollama service public key
			svcPubKey, err := os.ReadFile("/usr/share/ollama/.ollama/id_ed25519.pub")
			if err != nil {
				return unknownKeyErr
			}
			localPubKey = strings.TrimSpace(string(svcPubKey))
		}

		// check if the returned public key matches the local public key; this prevents adding a remote key to the user's account
		if serverPubKey != localPubKey {
			return unknownKeyErr
		}

		var msg strings.Builder
		msg.WriteString(unknownKeyErr.Error())
		msg.WriteString("\n\nYour ollama key is:\n")
		msg.WriteString(localPubKey)
		msg.WriteString("\nAdd your key at:\n")
		msg.WriteString("https://ollama.com/settings/keys")

		return errors.New(msg.String())
	}

	return unknownKeyErr
}

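// PushHandler implements `ollama push`, streaming upload progress and translating
// common registry errors into friendlier messages.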
func PushHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	insecure, err := cmd.Flags().GetBool("insecure")
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	bars := make(map[string]*progress.Bar)
	var status string
	var spinner *progress.Spinner

	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			if spinner != nil {
				spinner.Stop()
			}

			bar, ok := bars[resp.Digest]
			if !ok {
				bar = progress.NewBar(fmt.Sprintf("pushing %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			if spinner != nil {
				spinner.Stop()
			}

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	request := api.PushRequest{Name: args[0], Insecure: insecure}
	if err := client.Push(cmd.Context(), &request, fn); err != nil {
		if spinner != nil {
			spinner.Stop()
		}
		if strings.Contains(err.Error(), "access denied") {
			return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own")
		}
		host := model.ParseName(args[0]).Host
		isOllamaHost := strings.HasSuffix(host, ".ollama.ai") || strings.HasSuffix(host, ".ollama.com")
		if strings.Contains(err.Error(), errtypes.UnknownOllamaKeyErrMsg) && isOllamaHost {
			// the user has not added their ollama key to ollama.com
			// re-throw an error with a more user-friendly message
			return errFromUnknownKey(err)
		}

		return err
	}

	spinner.Stop()
	return nil
}

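// ListHandler implements `ollama list`, printing the locally available models as a table.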
func ListHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.List(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
			data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), format.HumanTime(m.ModifiedAt, "Never")})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "MODIFIED"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("\t")
	table.AppendBulk(data)
	table.Render()

	return nil
}

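// ListRunningHandler implements `ollama ps`, printing the currently loaded models
// along with their CPU/GPU split and expiry time.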
func ListRunningHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.ListRunning(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
			var procStr string
			switch {
			case m.SizeVRAM == 0:
				procStr = "100% CPU"
			case m.SizeVRAM == m.Size:
				procStr = "100% GPU"
			case m.SizeVRAM > m.Size || m.Size == 0:
				procStr = "Unknown"
			default:
				sizeCPU := m.Size - m.SizeVRAM
				cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100)
				procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent))
			}
			data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, format.HumanTime(m.ExpiresAt, "Never")})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "UNTIL"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("\t")
	table.AppendBulk(data)
	table.Render()

	return nil
}

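// DeleteHandler implements `ollama rm`, deleting each named model.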
func DeleteHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	for _, name := range args {
		req := api.DeleteRequest{Name: name}
		if err := client.Delete(cmd.Context(), &req); err != nil {
			return err
		}
		fmt.Printf("deleted '%s'\n", name)
	}
	return nil
}

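// ShowHandler implements `ollama show`: with exactly one field flag set it prints just
// that field, otherwise it renders the full model summary via showInfo.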
func ShowHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	license, errLicense := cmd.Flags().GetBool("license")
	modelfile, errModelfile := cmd.Flags().GetBool("modelfile")
	parameters, errParams := cmd.Flags().GetBool("parameters")
	system, errSystem := cmd.Flags().GetBool("system")
	template, errTemplate := cmd.Flags().GetBool("template")

	for _, boolErr := range []error{errLicense, errModelfile, errParams, errSystem, errTemplate} {
		if boolErr != nil {
			return errors.New("error retrieving flags")
		}
	}

	flagsSet := 0
	showType := ""

	if license {
		flagsSet++
		showType = "license"
	}

	if modelfile {
		flagsSet++
		showType = "modelfile"
	}

	if parameters {
		flagsSet++
		showType = "parameters"
	}

	if system {
		flagsSet++
		showType = "system"
	}

	if template {
		flagsSet++
		showType = "template"
	}

	if flagsSet > 1 {
		return errors.New("only one of '--license', '--modelfile', '--parameters', '--system', or '--template' can be specified")
	}

	req := api.ShowRequest{Name: args[0]}
	resp, err := client.Show(cmd.Context(), &req)
	if err != nil {
		return err
	}

	if flagsSet == 1 {
		switch showType {
		case "license":
			fmt.Println(resp.License)
		case "modelfile":
			fmt.Println(resp.Modelfile)
		case "parameters":
			fmt.Println(resp.Parameters)
		case "system":
			fmt.Println(resp.System)
		case "template":
			fmt.Println(resp.Template)
		}

		return nil
	}

	showInfo(resp)

	return nil
}

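// showInfo renders the model, projector, parameter, system, and license details of a
// show response as nested tables on stdout.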
func showInfo(resp *api.ShowResponse) {
	arch := resp.ModelInfo["general.architecture"].(string)

	modelData := [][]string{
		{"arch", arch},
		{"parameters", resp.Details.ParameterSize},
		{"quantization", resp.Details.QuantizationLevel},
		{"context length", fmt.Sprintf("%v", resp.ModelInfo[fmt.Sprintf("%s.context_length", arch)].(float64))},
		{"embedding length", fmt.Sprintf("%v", resp.ModelInfo[fmt.Sprintf("%s.embedding_length", arch)].(float64))},
	}

	mainTableData := [][]string{
		{"Model"},
		{renderSubTable(modelData, false)},
	}

	if resp.ProjectorInfo != nil {
		projectorData := [][]string{
			{"arch", "clip"},
			{"parameters", format.HumanNumber(uint64(resp.ProjectorInfo["general.parameter_count"].(float64)))},
		}

		if projectorType, ok := resp.ProjectorInfo["clip.projector_type"]; ok {
			projectorData = append(projectorData, []string{"projector type", projectorType.(string)})
		}

		projectorData = append(projectorData,
			[]string{"embedding length", fmt.Sprintf("%v", resp.ProjectorInfo["clip.vision.embedding_length"].(float64))},
			[]string{"projection dimensionality", fmt.Sprintf("%v", resp.ProjectorInfo["clip.vision.projection_dim"].(float64))},
		)

		mainTableData = append(mainTableData,
			[]string{"Projector"},
			[]string{renderSubTable(projectorData, false)},
		)
	}

	if resp.Parameters != "" {
		mainTableData = append(mainTableData, []string{"Parameters"}, []string{formatParams(resp.Parameters)})
	}

	if resp.System != "" {
		mainTableData = append(mainTableData, []string{"System"}, []string{renderSubTable(twoLines(resp.System), true)})
	}

	if resp.License != "" {
		mainTableData = append(mainTableData, []string{"License"}, []string{renderSubTable(twoLines(resp.License), true)})
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetAutoWrapText(false)
	table.SetBorder(false)
	table.SetAlignment(tablewriter.ALIGN_LEFT)

	for _, v := range mainTableData {
		table.Append(v)
	}

	table.Render()
}

// renderSubTable renders data as a borderless, tab-indented table and returns it as a string.
func renderSubTable(data [][]string, file bool) string {
	var buf bytes.Buffer
	table := tablewriter.NewWriter(&buf)
	table.SetAutoWrapText(!file)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("\t")
	table.SetAlignment(tablewriter.ALIGN_LEFT)

	for _, v := range data {
		table.Append(v)
	}

	table.Render()

	renderedTable := buf.String()
	lines := strings.Split(renderedTable, "\n")
	for i, line := range lines {
		lines[i] = "\t" + line
	}

	return strings.Join(lines, "\n")
}

// twoLines returns at most the first two non-empty, trimmed lines of s, one table row per line.
func twoLines(s string) [][]string {
	lines := strings.Split(s, "\n")
	res := [][]string{}

	count := 0
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if line != "" {
			count++
			res = append(res, []string{line})
			if count == 2 {
				return res
			}
		}
	}
	return res
}

// formatParams splits each parameter line into fields and renders them as a sub-table.
func formatParams(s string) string {
	lines := strings.Split(s, "\n")
	table := [][]string{}

	for _, line := range lines {
		table = append(table, strings.Fields(line))
	}
	return renderSubTable(table, false)
}

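// CopyHandler implements `ollama cp`, copying a model to a new name.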
func CopyHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	req := api.CopyRequest{Source: args[0], Destination: args[1]}
	if err := client.Copy(cmd.Context(), &req); err != nil {
		return err
	}
	fmt.Printf("copied '%s' to '%s'\n", args[0], args[1])
	return nil
}

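// PullHandler implements `ollama pull`, streaming download progress for each layer.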
func PullHandler(cmd *cobra.Command, args []string) error {
	insecure, err := cmd.Flags().GetBool("insecure")
	if err != nil {
		return err
	}

	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	bars := make(map[string]*progress.Bar)

	var status string
	var spinner *progress.Spinner

	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			if spinner != nil {
				spinner.Stop()
			}

			bar, ok := bars[resp.Digest]
			if !ok {
				bar = progress.NewBar(fmt.Sprintf("pulling %s...", resp.Digest[7:19]), resp.Total, resp.Completed)
				bars[resp.Digest] = bar
				p.Add(resp.Digest, bar)
			}

			bar.Set(resp.Completed)
		} else if status != resp.Status {
			if spinner != nil {
				spinner.Stop()
			}

			status = resp.Status
			spinner = progress.NewSpinner(status)
			p.Add(status, spinner)
		}

		return nil
	}

	request := api.PullRequest{Name: args[0], Insecure: insecure}
	if err := client.Pull(cmd.Context(), &request, fn); err != nil {
		return err
	}

	return nil
}

type generateContextKey string

type runOptions struct {
	Model       string
	ParentModel string
	Prompt      string
	Messages    []api.Message
	WordWrap    bool
	Format      string
	System      string
	Images      []api.ImageData
	Options     map[string]interface{}
	MultiModal  bool
	KeepAlive   *api.Duration
}

type displayResponseState struct {
	lineLength int
	wordBuffer string
}

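// displayResponse prints streamed content to stdout, optionally word-wrapping it to the
// terminal width while tracking line and word state across chunks.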
func displayResponse(content string, wordWrap bool, state *displayResponseState) {
	termWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
	if wordWrap && termWidth >= 10 {
		for _, ch := range content {
			if state.lineLength+1 > termWidth-5 {
				if runewidth.StringWidth(state.wordBuffer) > termWidth-10 {
					fmt.Printf("%s%c", state.wordBuffer, ch)
					state.wordBuffer = ""
					state.lineLength = 0
					continue
				}

				// backtrack the length of the last word and clear to the end of the line
				a := runewidth.StringWidth(state.wordBuffer)
				if a > 0 {
					fmt.Printf("\x1b[%dD", a)
				}
				fmt.Printf("\x1b[K\n")
				fmt.Printf("%s%c", state.wordBuffer, ch)
				chWidth := runewidth.RuneWidth(ch)

				state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth
			} else {
				fmt.Print(string(ch))
				state.lineLength += runewidth.RuneWidth(ch)
				if runewidth.RuneWidth(ch) >= 2 {
					state.wordBuffer = ""
					continue
				}

				switch ch {
				case ' ':
					state.wordBuffer = ""
				case '\n':
					state.lineLength = 0
				default:
					state.wordBuffer += string(ch)
				}
			}
		}
	} else {
		fmt.Printf("%s%s", state.wordBuffer, content)
		if len(state.wordBuffer) > 0 {
			state.wordBuffer = ""
		}
	}
}

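// chat streams a chat request, rendering the response as it arrives, and returns the
// assistant message; a SIGINT cancels the request.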
func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return nil, err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	cancelCtx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}
	var latest api.ChatResponse
	var fullResponse strings.Builder
	var role string

	fn := func(response api.ChatResponse) error {
		p.StopAndClear()

		latest = response

		role = response.Message.Role
		content := response.Message.Content
		fullResponse.WriteString(content)

		displayResponse(content, opts.WordWrap, state)

		return nil
	}

	req := &api.ChatRequest{
		Model:    opts.Model,
		Messages: opts.Messages,
		Format:   opts.Format,
		Options:  opts.Options,
	}

	if opts.KeepAlive != nil {
		req.KeepAlive = opts.KeepAlive
	}

	if err := client.Chat(cancelCtx, req, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			return nil, nil
		}
		return nil, err
	}

	if len(opts.Messages) > 0 {
		fmt.Println()
		fmt.Println()
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return nil, err
	}

	if verbose {
		latest.Summary()
	}

	return &api.Message{Role: role, Content: fullResponse.String()}, nil
}

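// generate streams a generate request, rendering the response as it arrives and storing
// the returned context on the command's context for the next turn.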
func generate(cmd *cobra.Command, opts runOptions) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	var latest api.GenerateResponse

	generateContext, ok := cmd.Context().Value(generateContextKey("context")).([]int)
	if !ok {
		generateContext = []int{}
	}

	ctx, cancel := context.WithCancel(cmd.Context())
	defer cancel()

	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT)

	go func() {
		<-sigChan
		cancel()
	}()

	var state *displayResponseState = &displayResponseState{}

	fn := func(response api.GenerateResponse) error {
		p.StopAndClear()

		latest = response
		content := response.Response

		displayResponse(content, opts.WordWrap, state)

		return nil
	}

	if opts.MultiModal {
		opts.Prompt, opts.Images, err = extractFileData(opts.Prompt)
		if err != nil {
			return err
		}
	}

	request := api.GenerateRequest{
		Model:     opts.Model,
		Prompt:    opts.Prompt,
		Context:   generateContext,
		Images:    opts.Images,
		Format:    opts.Format,
		System:    opts.System,
		Options:   opts.Options,
		KeepAlive: opts.KeepAlive,
	}

	if err := client.Generate(ctx, &request, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			return nil
		}
		return err
	}

	if opts.Prompt != "" {
		fmt.Println()
		fmt.Println()
	}

	if !latest.Done {
		return nil
	}

	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return err
	}

	if verbose {
		latest.Summary()
	}

	ctx = context.WithValue(cmd.Context(), generateContextKey("context"), latest.Context)
	cmd.SetContext(ctx)

	return nil
}

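// RunServer implements `ollama serve`: it ensures a keypair exists, then listens on the
// configured host and serves the API.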
func RunServer(cmd *cobra.Command, _ []string) error {
	if err := initializeKeypair(); err != nil {
		return err
	}

	ln, err := net.Listen("tcp", net.JoinHostPort(envconfig.Host.Host, envconfig.Host.Port))
	if err != nil {
		return err
	}

	err = server.Serve(ln)
	if errors.Is(err, http.ErrServerClosed) {
		return nil
	}

	return err
}

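// initializeKeypair creates ~/.ollama/id_ed25519 and the matching .pub file if they do
// not already exist.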
func initializeKeypair() error {
	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}

	privKeyPath := filepath.Join(home, ".ollama", "id_ed25519")
	pubKeyPath := filepath.Join(home, ".ollama", "id_ed25519.pub")

	_, err = os.Stat(privKeyPath)
	if os.IsNotExist(err) {
		fmt.Printf("Couldn't find '%s'. Generating new private key.\n", privKeyPath)
		cryptoPublicKey, cryptoPrivateKey, err := ed25519.GenerateKey(rand.Reader)
		if err != nil {
			return err
		}

		privateKeyBytes, err := ssh.MarshalPrivateKey(cryptoPrivateKey, "")
		if err != nil {
			return err
		}

		if err := os.MkdirAll(filepath.Dir(privKeyPath), 0o755); err != nil {
			return fmt.Errorf("could not create directory: %w", err)
		}

		if err := os.WriteFile(privKeyPath, pem.EncodeToMemory(privateKeyBytes), 0o600); err != nil {
			return err
		}

		sshPublicKey, err := ssh.NewPublicKey(cryptoPublicKey)
		if err != nil {
			return err
		}

		publicKeyBytes := ssh.MarshalAuthorizedKey(sshPublicKey)

		if err := os.WriteFile(pubKeyPath, publicKeyBytes, 0o644); err != nil {
			return err
		}

		fmt.Printf("Your new public key is: \n\n%s\n", publicKeyBytes)
	}
	return nil
}

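// checkServerHeartbeat verifies the server is reachable and, if the connection is
// refused, tries to start the desktop app before giving up.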
func checkServerHeartbeat(cmd *cobra.Command, _ []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}
	if err := client.Heartbeat(cmd.Context()); err != nil {
		if !strings.Contains(err.Error(), " refused") {
			return err
		}
		if err := startApp(cmd.Context(), client); err != nil {
			return errors.New("could not connect to ollama app, is it running?")
		}
	}
	return nil
}

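// versionHandler prints the server version and warns when it differs from the client version.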
func versionHandler(cmd *cobra.Command, _ []string) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return
	}

	serverVersion, err := client.Version(cmd.Context())
	if err != nil {
		fmt.Println("Warning: could not connect to a running Ollama instance")
	}

	if serverVersion != "" {
		fmt.Printf("ollama version is %s\n", serverVersion)
	}

	if serverVersion != version.Version {
		fmt.Printf("Warning: client version is %s\n", version.Version)
	}
}

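// appendEnvDocs appends a table of the given environment variables to the command's
// usage template.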
func appendEnvDocs(cmd *cobra.Command, envs []envconfig.EnvVar) {
	if len(envs) == 0 {
		return
	}

	envUsage := `
Environment Variables:
`
	for _, e := range envs {
		envUsage += fmt.Sprintf(" %-24s %s\n", e.Name, e.Description)
	}

	cmd.SetUsageTemplate(cmd.UsageTemplate() + envUsage)
}

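// NewCLI builds the root ollama command and wires up all subcommands, their flags, and
// their environment-variable documentation.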
func NewCLI() *cobra.Command {
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	cobra.EnableCommandSorting = false

	if runtime.GOOS == "windows" {
		console.ConsoleFromFile(os.Stdin) //nolint:errcheck
	}

	rootCmd := &cobra.Command{
		Use:           "ollama",
		Short:         "Large language model runner",
		SilenceUsage:  true,
		SilenceErrors: true,
		CompletionOptions: cobra.CompletionOptions{
			DisableDefaultCmd: true,
		},
		Run: func(cmd *cobra.Command, args []string) {
			if version, _ := cmd.Flags().GetBool("version"); version {
				versionHandler(cmd, args)
				return
			}

			cmd.Print(cmd.UsageString())
		},
	}

	rootCmd.Flags().BoolP("version", "v", false, "Show version information")

	createCmd := &cobra.Command{
		Use:     "create MODEL",
		Short:   "Create a model from a Modelfile",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    CreateHandler,
	}

	createCmd.Flags().StringP("file", "f", "Modelfile", "Name of the Modelfile")
	createCmd.Flags().StringP("quantize", "q", "", "Quantize model to this level (e.g. q4_0)")

	showCmd := &cobra.Command{
		Use:     "show MODEL",
		Short:   "Show information for a model",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    ShowHandler,
	}

	showCmd.Flags().Bool("license", false, "Show license of a model")
	showCmd.Flags().Bool("modelfile", false, "Show Modelfile of a model")
	showCmd.Flags().Bool("parameters", false, "Show parameters of a model")
	showCmd.Flags().Bool("template", false, "Show template of a model")
	showCmd.Flags().Bool("system", false, "Show system message of a model")

	runCmd := &cobra.Command{
		Use:     "run MODEL [PROMPT]",
		Short:   "Run a model",
		Args:    cobra.MinimumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    RunHandler,
	}

	runCmd.Flags().String("keepalive", "", "Duration to keep a model loaded (e.g. 5m)")
	runCmd.Flags().Bool("verbose", false, "Show timings for response")
	runCmd.Flags().Bool("insecure", false, "Use an insecure registry")
	runCmd.Flags().Bool("nowordwrap", false, "Don't wrap words to the next line automatically")
	runCmd.Flags().String("format", "", "Response format (e.g. json)")

	serveCmd := &cobra.Command{
		Use:     "serve",
		Aliases: []string{"start"},
		Short:   "Start ollama",
		Args:    cobra.ExactArgs(0),
		RunE:    RunServer,
	}

	pullCmd := &cobra.Command{
		Use:     "pull MODEL",
		Short:   "Pull a model from a registry",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    PullHandler,
	}

	pullCmd.Flags().Bool("insecure", false, "Use an insecure registry")

	pushCmd := &cobra.Command{
		Use:     "push MODEL",
		Short:   "Push a model to a registry",
		Args:    cobra.ExactArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    PushHandler,
	}

	pushCmd.Flags().Bool("insecure", false, "Use an insecure registry")

	listCmd := &cobra.Command{
		Use:     "list",
		Aliases: []string{"ls"},
		Short:   "List models",
		PreRunE: checkServerHeartbeat,
		RunE:    ListHandler,
	}

	psCmd := &cobra.Command{
		Use:     "ps",
		Short:   "List running models",
		PreRunE: checkServerHeartbeat,
		RunE:    ListRunningHandler,
	}

	copyCmd := &cobra.Command{
		Use:     "cp SOURCE DESTINATION",
		Short:   "Copy a model",
		Args:    cobra.ExactArgs(2),
		PreRunE: checkServerHeartbeat,
		RunE:    CopyHandler,
	}

	deleteCmd := &cobra.Command{
		Use:     "rm MODEL [MODEL...]",
		Short:   "Remove a model",
		Args:    cobra.MinimumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE:    DeleteHandler,
	}

	envVars := envconfig.AsMap()

	envs := []envconfig.EnvVar{envVars["OLLAMA_HOST"]}

	for _, cmd := range []*cobra.Command{
		createCmd,
		showCmd,
		runCmd,
		pullCmd,
		pushCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
		serveCmd,
	} {
		switch cmd {
		case runCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{envVars["OLLAMA_HOST"], envVars["OLLAMA_NOHISTORY"]})
		case serveCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{
				envVars["OLLAMA_DEBUG"],
				envVars["OLLAMA_HOST"],
				envVars["OLLAMA_KEEP_ALIVE"],
				envVars["OLLAMA_MAX_LOADED_MODELS"],
				envVars["OLLAMA_MAX_QUEUE"],
				envVars["OLLAMA_MODELS"],
				envVars["OLLAMA_NUM_PARALLEL"],
				envVars["OLLAMA_NOPRUNE"],
				envVars["OLLAMA_ORIGINS"],
				envVars["OLLAMA_SCHED_SPREAD"],
				envVars["OLLAMA_TMPDIR"],
				envVars["OLLAMA_FLASH_ATTENTION"],
				envVars["OLLAMA_LLM_LIBRARY"],
			})
		default:
			appendEnvDocs(cmd, envs)
		}
	}

	rootCmd.AddCommand(
		serveCmd,
		createCmd,
		showCmd,
		runCmd,
		pullCmd,
		pushCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
	)

	return rootCmd
}