generate progress

Michael Yang 2023-07-06 15:43:04 -07:00
parent b0e63bfb4c
commit b0618a466e
3 changed files with 67 additions and 21 deletions

View file

@@ -3,11 +3,13 @@ package cmd
 import (
 	"bufio"
 	"context"
+	"errors"
 	"fmt"
 	"log"
 	"net"
 	"os"
 	"path"
+	"time"
 
 	"github.com/schollz/progressbar/v3"
 	"github.com/spf13/cobra"
@@ -27,11 +29,18 @@ func cacheDir() string {
 }
 
 func RunRun(cmd *cobra.Command, args []string) error {
-	if err := pull(args[0]); err != nil {
+	_, err := os.Stat(args[0])
+	switch {
+	case errors.Is(err, os.ErrNotExist):
+		if err := pull(args[0]); err != nil {
+			return err
+		}
+
+		fmt.Println("Up to date.")
+	case err != nil:
 		return err
 	}
 
-	fmt.Println("Up to date.")
 	return RunGenerate(cmd, args)
 }
@@ -54,7 +63,7 @@ func pull(model string) error {
 
 func RunGenerate(_ *cobra.Command, args []string) error {
 	if len(args) > 1 {
-		return generate(args[0], args[1:]...)
+		return generateOneshot(args[0], args[1:]...)
 	}
 
 	if term.IsTerminal(int(os.Stdin.Fd())) {
@@ -64,21 +73,53 @@ func RunGenerate(_ *cobra.Command, args []string) error {
 	return generateBatch(args[0])
 }
 
-func generate(model string, prompts ...string) error {
+func generate(model, prompt string) error {
 	client := api.NewClient()
 
-	for _, prompt := range prompts {
-		client.Generate(context.Background(), &api.GenerateRequest{Model: model, Prompt: prompt}, func(resp api.GenerateResponse) error {
-			fmt.Print(resp.Response)
-			return nil
-		})
-	}
+	spinner := progressbar.NewOptions(-1,
+		progressbar.OptionSetWriter(os.Stderr),
+		progressbar.OptionThrottle(60*time.Millisecond),
+		progressbar.OptionSpinnerType(14),
+		progressbar.OptionSetRenderBlankState(true),
+		progressbar.OptionSetElapsedTime(false),
+		progressbar.OptionClearOnFinish(),
+	)
+
+	go func() {
+		for range time.Tick(60 * time.Millisecond) {
+			if spinner.IsFinished() {
+				break
+			}
+
+			spinner.Add(1)
+		}
+	}()
+
+	client.Generate(context.Background(), &api.GenerateRequest{Model: model, Prompt: prompt}, func(resp api.GenerateResponse) error {
+		if !spinner.IsFinished() {
+			spinner.Finish()
+		}
+
+		fmt.Print(resp.Response)
+		return nil
+	})
 
 	fmt.Println()
 	fmt.Println()
 
 	return nil
 }
 
+func generateOneshot(model string, prompts ...string) error {
+	for _, prompt := range prompts {
+		fmt.Printf(">>> %s\n", prompt)
+		if err := generate(model, prompt); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
 func generateInteractive(model string) error {
 	fmt.Print(">>> ")
 	scanner := bufio.NewScanner(os.Stdin)
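
The spinner wiring added to generate() above also works as a standalone program. The sketch below reuses the same progressbar/v3 options that appear in the diff; the doWork helper is a made-up stand-in for the streaming client.Generate call, so treat it as an illustration rather than the project's code.

package main

import (
	"fmt"
	"os"
	"time"

	"github.com/schollz/progressbar/v3"
)

func main() {
	// A max of -1 puts progressbar into indeterminate (spinner) mode.
	spinner := progressbar.NewOptions(-1,
		progressbar.OptionSetWriter(os.Stderr),
		progressbar.OptionThrottle(60*time.Millisecond),
		progressbar.OptionSpinnerType(14),
		progressbar.OptionSetRenderBlankState(true),
		progressbar.OptionSetElapsedTime(false),
		progressbar.OptionClearOnFinish(),
	)

	// Animate the spinner until something marks it finished.
	go func() {
		for range time.Tick(60 * time.Millisecond) {
			if spinner.IsFinished() {
				break
			}
			spinner.Add(1)
		}
	}()

	doWork(func(token string) {
		// Clear the spinner as soon as the first token arrives.
		if !spinner.IsFinished() {
			spinner.Finish()
		}
		fmt.Print(token)
	})
	fmt.Println()
}

// doWork is a hypothetical stand-in for a streaming generate call: it emits a
// few tokens after an artificial delay.
func doWork(emit func(string)) {
	time.Sleep(2 * time.Second)
	for _, token := range []string{"hello", " ", "world"} {
		emit(token)
		time.Sleep(200 * time.Millisecond)
	}
}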

View file

@@ -4,7 +4,6 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
-	"io/ioutil"
 	"net/http"
 	"os"
 	"path"
@@ -30,6 +29,15 @@ type Model struct {
 	License string `json:"license"`
 }
 
+func (m *Model) FullName() string {
+	home, err := os.UserHomeDir()
+	if err != nil {
+		panic(err)
+	}
+
+	return path.Join(home, ".ollama", "models", m.Name+".bin")
+}
+
 func pull(model string, progressCh chan<- api.PullProgress) error {
 	remote, err := getRemote(model)
 	if err != nil {
@@ -45,7 +53,7 @@ func getRemote(model string) (*Model, error) {
 		return nil, fmt.Errorf("failed to get directory: %w", err)
 	}
 	defer resp.Body.Close()
-	body, err := ioutil.ReadAll(resp.Body)
+	body, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return nil, fmt.Errorf("failed to read directory: %w", err)
 	}
@@ -64,13 +72,6 @@ func getRemote(model string) (*Model, error) {
 
 func saveModel(model *Model, progressCh chan<- api.PullProgress) error {
 	// this models cache directory is created by the server on startup
-	home, err := os.UserHomeDir()
-	if err != nil {
-		return fmt.Errorf("failed to get home directory: %w", err)
-	}
-
-	modelsCache := path.Join(home, ".ollama", "models")
-	fileName := path.Join(modelsCache, model.Name+".bin")
 
 	client := &http.Client{}
 	req, err := http.NewRequest("GET", model.URL, nil)
@@ -79,7 +80,7 @@ func saveModel(model *Model, progressCh chan<- api.PullProgress) error {
 	}
 	// check for resume
 	alreadyDownloaded := int64(0)
-	fileInfo, err := os.Stat(fileName)
+	fileInfo, err := os.Stat(model.FullName())
 	if err != nil {
 		if !os.IsNotExist(err) {
 			return fmt.Errorf("failed to check resume model file: %w", err)
@@ -111,7 +112,7 @@ func saveModel(model *Model, progressCh chan<- api.PullProgress) error {
 		return fmt.Errorf("failed to download model: %s", resp.Status)
 	}
 
-	out, err := os.OpenFile(fileName, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
+	out, err := os.OpenFile(model.FullName(), os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
 	if err != nil {
 		panic(err)
 	}
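
The saveModel hunks above stat the destination file and open it with O_APPEND so an interrupted pull can resume. Below is a minimal sketch of that pattern, assuming the remaining bytes are requested with an HTTP Range header (the header itself is not shown in these hunks, so that part is an assumption), with an illustrative URL and destination path.

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
)

func download(url, dest string) error {
	// Check how much of the file is already on disk.
	alreadyDownloaded := int64(0)
	if fileInfo, err := os.Stat(dest); err == nil {
		alreadyDownloaded = fileInfo.Size()
	} else if !os.IsNotExist(err) {
		return fmt.Errorf("failed to check resume model file: %w", err)
	}

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return err
	}
	if alreadyDownloaded > 0 {
		// Assumption: ask the server for only the missing suffix.
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-", alreadyDownloaded))
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Append so a partial download is continued rather than overwritten.
	out, err := os.OpenFile(dest, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
	if err != nil {
		return err
	}
	defer out.Close()

	_, err = io.Copy(out, resp.Body)
	return err
}

func main() {
	if err := download("https://example.com/model.bin", "/tmp/model.bin"); err != nil {
		log.Fatal(err)
	}
}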

View file

@@ -37,6 +37,10 @@ func generate(c *gin.Context) {
 		return
 	}
 
+	if remoteModel, _ := getRemote(req.Model); remoteModel != nil {
+		req.Model = remoteModel.FullName()
+	}
+
 	model, err := llama.New(req.Model, llama.EnableF16Memory, llama.SetContext(128), llama.EnableEmbeddings, llama.SetGPULayers(gpulayers))
 	if err != nil {
 		fmt.Println("Loading the model failed:", err.Error())
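
For context, the server hunk above leans on the new FullName() helper: a bare model name resolves to a .bin file under $HOME/.ollama/models, and req.Model is rewritten to that path before llama.New loads it. A trimmed-down sketch of that resolution follows, with the Model struct reduced to the single field the sketch needs and an illustrative model name.

package main

import (
	"fmt"
	"os"
	"path"
)

type Model struct {
	Name string
}

// FullName maps a model name to its location in the local models cache.
func (m *Model) FullName() string {
	home, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}

	return path.Join(home, ".ollama", "models", m.Name+".bin")
}

func main() {
	m := &Model{Name: "example-model"}
	// Prints something like /home/user/.ollama/models/example-model.bin
	fmt.Println(m.FullName())
}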