// ollama/server/routes.go

package server

import (
	"bytes"
	"cmp"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"math"
	"net"
	"net/http"
	"net/netip"
	"os"
	"os/signal"
	"path/filepath"
	"slices"
	"strings"
	"syscall"
	"time"

	"github.com/gin-contrib/cors"
	"github.com/gin-gonic/gin"
	"golang.org/x/sync/errgroup"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/build"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/gpu"
	"github.com/ollama/ollama/llm"
	"github.com/ollama/ollama/openai"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/runners"
	"github.com/ollama/ollama/template"
	"github.com/ollama/ollama/types/errtypes"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/version"
)

var mode string = gin.DebugMode
type Server struct {
addr net.Addr
sched *Scheduler
}
func init() {
switch mode {
case gin.DebugMode:
case gin.ReleaseMode:
case gin.TestMode:
default:
mode = gin.DebugMode
}
gin.SetMode(mode)
}
var (
errRequired = errors.New("is required")
errBadTemplate = errors.New("template error")
)
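
// modelOptions builds the effective api.Options by layering the model's
// configured options and then the request options on top of the defaults.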
func modelOptions(model *Model, requestOpts map[string]interface{}) (api.Options, error) {
opts := api.DefaultOptions()
if err := opts.FromMap(model.Options); err != nil {
return api.Options{}, err
}
if err := opts.FromMap(requestOpts); err != nil {
return api.Options{}, err
}
return opts, nil
}
// scheduleRunner schedules a runner after validating inputs such as capabilities and model options.
// It returns the allocated runner, the model instance, and the consolidated options on success, or an error otherwise.
func (s *Server) scheduleRunner(ctx context.Context, name string, caps []Capability, requestOpts map[string]any, keepAlive *api.Duration) (llm.LlamaServer, *Model, *api.Options, error) {
if name == "" {
return nil, nil, nil, fmt.Errorf("model %w", errRequired)
}
model, err := GetModel(name)
if err != nil {
return nil, nil, nil, err
}
if err := model.CheckCapabilities(caps...); err != nil {
return nil, nil, nil, fmt.Errorf("%s %w", name, err)
}
opts, err := modelOptions(model, requestOpts)
if err != nil {
return nil, nil, nil, err
}
runnerCh, errCh := s.sched.GetRunner(ctx, model, opts, keepAlive)
var runner *runnerRef
select {
case runner = <-runnerCh:
case err = <-errCh:
return nil, nil, nil, err
}
return runner.llama, model, &opts, nil
}
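
// GenerateHandler handles POST /api/generate. It binds the request, schedules a
// runner for the requested model, renders the prompt template (unless raw mode
// is requested), and streams newline-delimited JSON responses; when stream is
// false the accumulated response is returned as a single JSON object. An empty
// prompt loads the model, and an empty prompt with keep_alive 0 unloads it.
//
// Illustrative request body (field names follow api.GenerateRequest; the model
// name is an example):
//
//	{"model": "llama3.1", "prompt": "Why is the sky blue?", "stream": false}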
func (s *Server) GenerateHandler(c *gin.Context) {
checkpointStart := time.Now()
var req api.GenerateRequest
if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
// expire the runner
if req.Prompt == "" && req.KeepAlive != nil && int(req.KeepAlive.Seconds()) == 0 {
model, err := GetModel(req.Model)
if err != nil {
switch {
case os.IsNotExist(err):
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
case err.Error() == "invalid model name":
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
}
return
}
s.sched.expireRunner(model)
c.JSON(http.StatusOK, api.GenerateResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Response: "",
Done: true,
DoneReason: "unload",
})
return
}
if req.Format != "" && req.Format != "json" {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "format must be empty or \"json\""})
return
} else if req.Raw && (req.Template != "" || req.System != "" || len(req.Context) > 0) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "raw mode does not support template, system, or context"})
return
}
caps := []Capability{CapabilityCompletion}
if req.Suffix != "" {
caps = append(caps, CapabilityInsert)
}
r, m, opts, err := s.scheduleRunner(c.Request.Context(), req.Model, caps, req.Options, req.KeepAlive)
if errors.Is(err, errCapabilityCompletion) {
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support generate", req.Model)})
return
} else if err != nil {
handleScheduleError(c, req.Model, err)
return
}
checkpointLoaded := time.Now()
if req.Prompt == "" {
c.JSON(http.StatusOK, api.GenerateResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Done: true,
DoneReason: "load",
})
return
}
images := make([]llm.ImageData, len(req.Images))
for i := range req.Images {
images[i] = llm.ImageData{ID: i, Data: req.Images[i]}
}
prompt := req.Prompt
if !req.Raw {
tmpl := m.Template
if req.Template != "" {
tmpl, err = template.Parse(req.Template)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
}
var values template.Values
if req.Suffix != "" {
values.Prompt = prompt
values.Suffix = req.Suffix
} else {
var msgs []api.Message
if req.System != "" {
msgs = append(msgs, api.Message{Role: "system", Content: req.System})
} else if m.System != "" {
msgs = append(msgs, api.Message{Role: "system", Content: m.System})
}
if req.Context == nil {
msgs = append(msgs, m.Messages...)
}
for _, i := range images {
msgs = append(msgs, api.Message{Role: "user", Content: fmt.Sprintf("[img-%d]", i.ID)})
}
values.Messages = append(msgs, api.Message{Role: "user", Content: req.Prompt})
}
var b bytes.Buffer
if req.Context != nil {
s, err := r.Detokenize(c.Request.Context(), req.Context)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
b.WriteString(s)
}
if err := tmpl.Execute(&b, values); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
prompt = b.String()
}
slog.Debug("generate request", "prompt", prompt, "images", images)
ch := make(chan any)
go func() {
// TODO (jmorganca): avoid building the response twice both here and below
var sb strings.Builder
defer close(ch)
if err := r.Completion(c.Request.Context(), llm.CompletionRequest{
Prompt: prompt,
Images: images,
Format: req.Format,
Options: opts,
}, func(cr llm.CompletionResponse) {
res := api.GenerateResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Response: cr.Content,
Done: cr.Done,
DoneReason: cr.DoneReason,
Metrics: api.Metrics{
PromptEvalCount: cr.PromptEvalCount,
PromptEvalDuration: cr.PromptEvalDuration,
EvalCount: cr.EvalCount,
EvalDuration: cr.EvalDuration,
},
}
if _, err := sb.WriteString(cr.Content); err != nil {
ch <- gin.H{"error": err.Error()}
}
if cr.Done {
res.TotalDuration = time.Since(checkpointStart)
res.LoadDuration = checkpointLoaded.Sub(checkpointStart)
if !req.Raw {
tokens, err := r.Tokenize(c.Request.Context(), prompt+sb.String())
if err != nil {
ch <- gin.H{"error": err.Error()}
return
}
res.Context = tokens
}
}
ch <- res
}); err != nil {
ch <- gin.H{"error": err.Error()}
}
}()
if req.Stream != nil && !*req.Stream {
var r api.GenerateResponse
var sb strings.Builder
for rr := range ch {
switch t := rr.(type) {
case api.GenerateResponse:
sb.WriteString(t.Response)
r = t
case gin.H:
msg, ok := t["error"].(string)
if !ok {
msg = "unexpected error format in response"
}
c.JSON(http.StatusInternalServerError, gin.H{"error": msg})
return
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected response"})
return
}
}
r.Response = sb.String()
c.JSON(http.StatusOK, r)
return
}
streamResponse(c, ch)
}
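
// EmbedHandler handles POST /api/embed. It accepts a single string or a list of
// strings as input, optionally truncates each input to the model's context
// length, and returns L2-normalized embeddings computed in parallel.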
func (s *Server) EmbedHandler(c *gin.Context) {
checkpointStart := time.Now()
var req api.EmbedRequest
err := c.ShouldBindJSON(&req)
switch {
case errors.Is(err, io.EOF):
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
case err != nil:
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
truncate := true
if req.Truncate != nil && !*req.Truncate {
truncate = false
}
var input []string
switch i := req.Input.(type) {
case string:
if len(i) > 0 {
input = append(input, i)
}
case []any:
for _, v := range i {
if _, ok := v.(string); !ok {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid input type"})
return
}
input = append(input, v.(string))
}
default:
if req.Input != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid input type"})
return
}
}
r, m, opts, err := s.scheduleRunner(c.Request.Context(), req.Model, []Capability{}, req.Options, req.KeepAlive)
if err != nil {
handleScheduleError(c, req.Model, err)
return
}
checkpointLoaded := time.Now()
if len(input) == 0 {
c.JSON(http.StatusOK, api.EmbedResponse{Model: req.Model, Embeddings: [][]float32{}})
return
}
kvData, err := getKVData(m.ModelPath, false)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
var count int
for i, s := range input {
tokens, err := r.Tokenize(c.Request.Context(), s)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
ctxLen := min(opts.NumCtx, int(kvData.ContextLength()))
if len(tokens) > ctxLen {
if !truncate {
c.JSON(http.StatusBadRequest, gin.H{"error": "input length exceeds maximum context length"})
return
}
tokens = tokens[:ctxLen]
s, err = r.Detokenize(c.Request.Context(), tokens)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
}
count += len(tokens)
input[i] = s
}
var g errgroup.Group
embeddings := make([][]float32, len(input))
for i, text := range input {
g.Go(func() error {
embedding, err := r.Embedding(c.Request.Context(), text)
if err != nil {
return err
}
embeddings[i] = normalize(embedding)
return nil
})
}
if err := g.Wait(); err != nil {
slog.Error("embedding generation failed", "error", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to generate embeddings: %v", err)})
return
}
resp := api.EmbedResponse{
Model: req.Model,
Embeddings: embeddings,
TotalDuration: time.Since(checkpointStart),
LoadDuration: checkpointLoaded.Sub(checkpointStart),
PromptEvalCount: count,
}
c.JSON(http.StatusOK, resp)
}
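
// normalize scales vec to unit L2 norm in place; an all-zero vector is
// returned unchanged.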
func normalize(vec []float32) []float32 {
var sum float32
for _, v := range vec {
sum += v * v
}
norm := float32(0.0)
if sum > 0 {
norm = float32(1.0 / math.Sqrt(float64(sum)))
}
for i := range vec {
vec[i] *= norm
}
return vec
}
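
// EmbeddingsHandler handles POST /api/embeddings, the older single-prompt
// embedding endpoint that returns a []float64. An empty prompt only loads the
// model and returns an empty embedding.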
func (s *Server) EmbeddingsHandler(c *gin.Context) {
var req api.EmbeddingRequest
if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
r, _, _, err := s.scheduleRunner(c.Request.Context(), req.Model, []Capability{}, req.Options, req.KeepAlive)
if err != nil {
handleScheduleError(c, req.Model, err)
return
}
// an empty request loads the model
if req.Prompt == "" {
c.JSON(http.StatusOK, api.EmbeddingResponse{Embedding: []float64{}})
return
}
embedding, err := r.Embedding(c.Request.Context(), req.Prompt)
if err != nil {
slog.Info(fmt.Sprintf("embedding generation failed: %v", err))
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate embedding"})
return
}
var e []float64
for _, v := range embedding {
e = append(e, float64(v))
}
resp := api.EmbeddingResponse{
Embedding: e,
}
c.JSON(http.StatusOK, resp)
}
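
// PullHandler handles POST /api/pull, validating the model name and streaming
// progress while the model is downloaded from the registry.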
func (s *Server) PullHandler(c *gin.Context) {
var req api.PullRequest
err := c.ShouldBindJSON(&req)
switch {
case errors.Is(err, io.EOF):
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
case err != nil:
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
name := model.ParseName(cmp.Or(req.Model, req.Name))
if !name.IsValid() {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "invalid model name"})
return
}
if err := checkNameExists(name); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
ch := make(chan any)
go func() {
defer close(ch)
fn := func(r api.ProgressResponse) {
ch <- r
}
regOpts := &registryOptions{
Insecure: req.Insecure,
}
ctx, cancel := context.WithCancel(c.Request.Context())
defer cancel()
if err := PullModel(ctx, name.DisplayShortest(), regOpts, fn); err != nil {
ch <- gin.H{"error": err.Error()}
}
}()
if req.Stream != nil && !*req.Stream {
waitForStream(c, ch)
return
}
streamResponse(c, ch)
}
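
// PushHandler handles POST /api/push, streaming progress while the model is
// uploaded to the registry.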
func (s *Server) PushHandler(c *gin.Context) {
var req api.PushRequest
err := c.ShouldBindJSON(&req)
switch {
case errors.Is(err, io.EOF):
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
case err != nil:
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
var model string
if req.Model != "" {
model = req.Model
} else if req.Name != "" {
model = req.Name
} else {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
return
}
ch := make(chan any)
go func() {
defer close(ch)
fn := func(r api.ProgressResponse) {
ch <- r
}
regOpts := &registryOptions{
Insecure: req.Insecure,
}
ctx, cancel := context.WithCancel(c.Request.Context())
defer cancel()
if err := PushModel(ctx, model, regOpts, fn); err != nil {
ch <- gin.H{"error": err.Error()}
}
}()
if req.Stream != nil && !*req.Stream {
waitForStream(c, ch)
return
}
streamResponse(c, ch)
}
func checkNameExists(name model.Name) error {
names, err := Manifests()
if err != nil {
return err
}
for n := range names {
if strings.EqualFold(n.Filepath(), name.Filepath()) && n != name {
return errors.New("a model with that name already exists")
}
}
return nil
}
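
// CreateHandler handles POST /api/create. It parses a Modelfile supplied inline
// or by path, optionally quantizes the weights, and streams progress while the
// model is created.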
func (s *Server) CreateHandler(c *gin.Context) {
var r api.CreateRequest
if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
name := model.ParseName(cmp.Or(r.Model, r.Name))
if !name.IsValid() {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": errtypes.InvalidModelNameErrMsg})
return
}
if err := checkNameExists(name); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if r.Path == "" && r.Modelfile == "" {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "path or modelfile are required"})
return
}
var sr io.Reader = strings.NewReader(r.Modelfile)
if r.Path != "" && r.Modelfile == "" {
f, err := os.Open(r.Path)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("error reading modelfile: %s", err)})
return
}
defer f.Close()
sr = f
}
f, err := parser.ParseFile(sr)
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
ch := make(chan any)
go func() {
defer close(ch)
fn := func(resp api.ProgressResponse) {
ch <- resp
}
ctx, cancel := context.WithCancel(c.Request.Context())
defer cancel()
quantization := cmp.Or(r.Quantize, r.Quantization)
if err := CreateModel(ctx, name, filepath.Dir(r.Path), strings.ToUpper(quantization), f, fn); errors.Is(err, errBadTemplate) {
ch <- gin.H{"error": err.Error(), "status": http.StatusBadRequest}
} else if err != nil {
ch <- gin.H{"error": err.Error()}
}
}()
if r.Stream != nil && !*r.Stream {
waitForStream(c, ch)
return
}
streamResponse(c, ch)
}
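
// DeleteHandler handles DELETE /api/delete, removing the model's manifest and
// its associated layers.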
func (s *Server) DeleteHandler(c *gin.Context) {
var r api.DeleteRequest
if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
n := model.ParseName(cmp.Or(r.Model, r.Name))
if !n.IsValid() {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("name %q is invalid", cmp.Or(r.Model, r.Name))})
return
}
m, err := ParseNamedManifest(n)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
if err := m.Remove(); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
if err := m.RemoveLayers(); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
}
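
// ShowHandler handles POST /api/show and returns details about a model, such as
// its license, system prompt, template, parameters, and metadata.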
func (s *Server) ShowHandler(c *gin.Context) {
var req api.ShowRequest
err := c.ShouldBindJSON(&req)
switch {
case errors.Is(err, io.EOF):
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
case err != nil:
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if req.Model != "" {
// noop
} else if req.Name != "" {
req.Model = req.Name
} else {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "model is required"})
return
}
resp, err := GetModelInfo(req)
if err != nil {
switch {
case os.IsNotExist(err):
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
case err.Error() == "invalid model name":
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
}
return
}
c.JSON(http.StatusOK, resp)
}
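
// GetModelInfo assembles the api.ShowResponse for a model: license, system
// prompt, template, parameters, a regenerated Modelfile, and the model's
// key/value metadata.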
func GetModelInfo(req api.ShowRequest) (*api.ShowResponse, error) {
m, err := GetModel(req.Model)
if err != nil {
return nil, err
}
modelDetails := api.ModelDetails{
ParentModel: m.ParentModel,
Format: m.Config.ModelFormat,
Family: m.Config.ModelFamily,
Families: m.Config.ModelFamilies,
ParameterSize: m.Config.ModelType,
QuantizationLevel: m.Config.FileType,
}
if req.System != "" {
m.System = req.System
}
msgs := make([]api.Message, len(m.Messages))
for i, msg := range m.Messages {
msgs[i] = api.Message{Role: msg.Role, Content: msg.Content}
}
n := model.ParseName(req.Model)
if !n.IsValid() {
return nil, errors.New("invalid model name")
}
manifest, err := ParseNamedManifest(n)
if err != nil {
return nil, err
}
resp := &api.ShowResponse{
License: strings.Join(m.License, "\n"),
System: m.System,
Template: m.Template.String(),
Details: modelDetails,
Messages: msgs,
ModifiedAt: manifest.fi.ModTime(),
}
var params []string
cs := 30
for k, v := range m.Options {
switch val := v.(type) {
case []interface{}:
for _, nv := range val {
params = append(params, fmt.Sprintf("%-*s %#v", cs, k, nv))
}
default:
params = append(params, fmt.Sprintf("%-*s %#v", cs, k, v))
}
}
resp.Parameters = strings.Join(params, "\n")
for k, v := range req.Options {
if _, ok := req.Options[k]; ok {
m.Options[k] = v
}
}
var sb strings.Builder
fmt.Fprintln(&sb, "# Modelfile generated by \"ollama show\"")
fmt.Fprintln(&sb, "# To build a new Modelfile based on this, replace FROM with:")
fmt.Fprintf(&sb, "# FROM %s\n\n", m.ShortName)
fmt.Fprint(&sb, m.String())
resp.Modelfile = sb.String()
kvData, err := getKVData(m.ModelPath, req.Verbose)
if err != nil {
return nil, err
}
delete(kvData, "general.name")
delete(kvData, "tokenizer.chat_template")
resp.ModelInfo = kvData
if len(m.ProjectorPaths) > 0 {
projectorData, err := getKVData(m.ProjectorPaths[0], req.Verbose)
if err != nil {
return nil, err
}
resp.ProjectorInfo = projectorData
}
return resp, nil
}
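
// getKVData loads the key/value metadata from a model file. Unless verbose is
// set, large array values are elided to keep the response small.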
func getKVData(digest string, verbose bool) (llm.KV, error) {
maxArraySize := 0
if verbose {
maxArraySize = -1
}
kvData, err := llm.LoadModel(digest, maxArraySize)
if err != nil {
return nil, err
}
kv := kvData.KV()
if !verbose {
for k := range kv {
if t, ok := kv[k].([]any); len(t) > 5 && ok {
kv[k] = []any{}
}
}
}
return kv, nil
}
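
// ListHandler handles GET /api/tags, listing local models sorted by most
// recently modified.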
func (s *Server) ListHandler(c *gin.Context) {
ms, err := Manifests()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
models := []api.ListModelResponse{}
for n, m := range ms {
var cf ConfigV2
if m.Config.Digest != "" {
f, err := m.Config.Open()
if err != nil {
slog.Warn("bad manifest filepath", "name", n, "error", err)
continue
}
defer f.Close()
if err := json.NewDecoder(f).Decode(&cf); err != nil {
slog.Warn("bad manifest config", "name", n, "error", err)
continue
}
}
// tag should never be masked
models = append(models, api.ListModelResponse{
Model: n.DisplayShortest(),
Name: n.DisplayShortest(),
Size: m.Size(),
Digest: m.digest,
ModifiedAt: m.fi.ModTime(),
Details: api.ModelDetails{
Format: cf.ModelFormat,
Family: cf.ModelFamily,
Families: cf.ModelFamilies,
ParameterSize: cf.ModelType,
QuantizationLevel: cf.FileType,
},
})
}
slices.SortStableFunc(models, func(i, j api.ListModelResponse) int {
// most recently modified first
return cmp.Compare(j.ModifiedAt.Unix(), i.ModifiedAt.Unix())
})
c.JSON(http.StatusOK, api.ListResponse{Models: models})
}
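
// CopyHandler handles POST /api/copy, copying a model from a source name to a
// destination name.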
func (s *Server) CopyHandler(c *gin.Context) {
var r api.CopyRequest
if err := c.ShouldBindJSON(&r); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
src := model.ParseName(r.Source)
if !src.IsValid() {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("source %q is invalid", r.Source)})
return
}
dst := model.ParseName(r.Destination)
if !dst.IsValid() {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("destination %q is invalid", r.Destination)})
return
}
if err := checkNameExists(dst); err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if err := CopyModel(src, dst); errors.Is(err, os.ErrNotExist) {
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model %q not found", r.Source)})
} else if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
}
}
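
// HeadBlobHandler handles HEAD /api/blobs/:digest and reports whether a blob
// exists in the local blob store.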
func (s *Server) HeadBlobHandler(c *gin.Context) {
path, err := GetBlobsPath(c.Param("digest"))
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if _, err := os.Stat(path); err != nil {
c.AbortWithStatusJSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("blob %q not found", c.Param("digest"))})
return
}
c.Status(http.StatusOK)
}
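
// CreateBlobHandler handles POST /api/blobs/:digest, writing the request body
// to the blob store and verifying that it matches the expected digest.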
func (s *Server) CreateBlobHandler(c *gin.Context) {
if ib, ok := intermediateBlobs[c.Param("digest")]; ok {
p, err := GetBlobsPath(ib)
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
if _, err := os.Stat(p); errors.Is(err, os.ErrNotExist) {
slog.Info("evicting intermediate blob which no longer exists", "digest", ib)
delete(intermediateBlobs, c.Param("digest"))
} else if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
} else {
c.Status(http.StatusOK)
return
}
}
path, err := GetBlobsPath(c.Param("digest"))
if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
_, err = os.Stat(path)
switch {
case errors.Is(err, os.ErrNotExist):
// noop
case err != nil:
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
default:
c.Status(http.StatusOK)
return
}
layer, err := NewLayer(c.Request.Body, "")
if err != nil {
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
if layer.Digest != c.Param("digest") {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("digest mismatch, expected %q, got %q", c.Param("digest"), layer.Digest)})
return
}
c.Status(http.StatusCreated)
}
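
// isLocalIP reports whether ip is assigned to one of this machine's network
// interfaces.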
func isLocalIP(ip netip.Addr) bool {
if interfaces, err := net.Interfaces(); err == nil {
for _, iface := range interfaces {
addrs, err := iface.Addrs()
if err != nil {
continue
}
for _, a := range addrs {
if parsed, _, err := net.ParseCIDR(a.String()); err == nil {
if parsed.String() == ip.String() {
return true
}
}
}
}
}
return false
}
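
// allowedHost reports whether the given Host header refers to this machine: an
// empty host, localhost, the machine's hostname, or a host under a local-only
// TLD such as .localhost, .local, or .internal.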
func allowedHost(host string) bool {
if host == "" || host == "localhost" {
return true
}
if hostname, err := os.Hostname(); err == nil && host == hostname {
return true
}
tlds := []string{
"localhost",
"local",
"internal",
}
// check if the host is a local TLD
for _, tld := range tlds {
if strings.HasSuffix(host, "."+tld) {
return true
}
}
return false
}
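
// allowedHostsMiddleware rejects requests whose Host header is neither a local
// address nor an allowed local hostname when the server is bound to a loopback
// address.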
func allowedHostsMiddleware(addr net.Addr) gin.HandlerFunc {
return func(c *gin.Context) {
if addr == nil {
c.Next()
return
}
if addr, err := netip.ParseAddrPort(addr.String()); err == nil && !addr.Addr().IsLoopback() {
c.Next()
return
}
host, _, err := net.SplitHostPort(c.Request.Host)
if err != nil {
host = c.Request.Host
}
if addr, err := netip.ParseAddr(host); err == nil {
if addr.IsLoopback() || addr.IsPrivate() || addr.IsUnspecified() || isLocalIP(addr) {
c.Next()
return
}
}
if allowedHost(host) {
if c.Request.Method == http.MethodOptions {
c.AbortWithStatus(http.StatusNoContent)
return
}
c.Next()
return
}
c.AbortWithStatus(http.StatusForbidden)
}
}
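
// GenerateRoutes builds the gin router with CORS and allowed-host middleware,
// the native /api endpoints, and the OpenAI-compatible /v1 endpoints.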
func (s *Server) GenerateRoutes() http.Handler {
config := cors.DefaultConfig()
config.AllowWildcard = true
config.AllowBrowserExtensions = true
config.AllowHeaders = []string{"Authorization", "Content-Type", "User-Agent", "Accept", "X-Requested-With"}
openAIProperties := []string{"lang", "package-version", "os", "arch", "runtime", "runtime-version", "async"}
for _, prop := range openAIProperties {
config.AllowHeaders = append(config.AllowHeaders, "x-stainless-"+prop)
}
config.AllowOrigins = envconfig.Origins()
r := gin.Default()
r.Use(
cors.New(config),
allowedHostsMiddleware(s.addr),
)
r.POST("/api/pull", s.PullHandler)
r.POST("/api/generate", s.GenerateHandler)
r.POST("/api/chat", s.ChatHandler)
r.POST("/api/embed", s.EmbedHandler)
r.POST("/api/embeddings", s.EmbeddingsHandler)
r.POST("/api/create", s.CreateHandler)
r.POST("/api/push", s.PushHandler)
r.POST("/api/copy", s.CopyHandler)
r.DELETE("/api/delete", s.DeleteHandler)
r.POST("/api/show", s.ShowHandler)
r.POST("/api/blobs/:digest", s.CreateBlobHandler)
r.HEAD("/api/blobs/:digest", s.HeadBlobHandler)
r.GET("/api/ps", s.PsHandler)
// Compatibility endpoints
r.POST("/v1/chat/completions", openai.ChatMiddleware(), s.ChatHandler)
r.POST("/v1/completions", openai.CompletionsMiddleware(), s.GenerateHandler)
r.POST("/v1/embeddings", openai.EmbeddingsMiddleware(), s.EmbedHandler)
r.GET("/v1/models", openai.ListMiddleware(), s.ListHandler)
r.GET("/v1/models/:model", openai.RetrieveMiddleware(), s.ShowHandler)
for _, method := range []string{http.MethodGet, http.MethodHead} {
r.Handle(method, "/", func(c *gin.Context) {
c.String(http.StatusOK, "Ollama is running")
})
r.Handle(method, "/api/tags", s.ListHandler)
r.Handle(method, "/api/version", func(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{"version": version.Version})
})
}
return r
}
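
// Serve configures logging, prunes unused layers and manifests (unless pruning
// is disabled), initializes the llm runners and the scheduler, and serves the
// HTTP API on ln until the process receives SIGINT or SIGTERM.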
func Serve(ln net.Listener) error {
level := slog.LevelInfo
if envconfig.Debug() {
level = slog.LevelDebug
}
slog.Info("server config", "env", envconfig.Values())
handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
Level: level,
AddSource: true,
ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
if attr.Key == slog.SourceKey {
source := attr.Value.Any().(*slog.Source)
source.File = filepath.Base(source.File)
}
return attr
},
})
slog.SetDefault(slog.New(handler))
blobsDir, err := GetBlobsPath("")
if err != nil {
return err
}
if err := fixBlobs(blobsDir); err != nil {
return err
}
if !envconfig.NoPrune() {
// clean up unused layers and manifests
if err := PruneLayers(); err != nil {
return err
}
manifestsPath, err := GetManifestPath()
if err != nil {
return err
}
if err := PruneDirectory(manifestsPath); err != nil {
return err
}
}
ctx, done := context.WithCancel(context.Background())
schedCtx, schedDone := context.WithCancel(ctx)
sched := InitScheduler(schedCtx)
s := &Server{addr: ln.Addr(), sched: sched}
http.Handle("/", s.GenerateRoutes())
slog.Info(fmt.Sprintf("Listening on %s (version %s)", ln.Addr(), version.Version))
srvr := &http.Server{
// Use http.DefaultServeMux so we get net/http/pprof for
// free.
//
// TODO(bmizerany): Decide if we want to make this
// configurable so it is not exposed by default, or allow
// users to bind it to a different port. This was a quick
// and easy way to get pprof, but it may not be the best
// way.
Handler: nil,
}
// listen for a ctrl+c and stop any loaded llm
signals := make(chan os.Signal, 1)
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
go func() {
<-signals
srvr.Close()
schedDone()
sched.unloadAllRunners()
runners.Cleanup(build.EmbedFS)
done()
}()
if _, err := runners.Refresh(build.EmbedFS); err != nil {
return fmt.Errorf("unable to initialize llm runners %w", err)
}
s.sched.Run(schedCtx)
// Retrieve GPU information at startup so that any problems with detected GPUs
// are logged before a model is loaded.
gpus := gpu.GetGPUInfo()
gpus.LogDetails()
err = srvr.Serve(ln)
// If server is closed from the signal handler, wait for the ctx to be done
// otherwise error out quickly
if !errors.Is(err, http.ErrServerClosed) {
return err
}
<-ctx.Done()
return nil
}
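
// waitForStream drains ch and writes a single JSON response: the final
// ProgressResponse on success, or the first error encountered.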
func waitForStream(c *gin.Context, ch chan interface{}) {
c.Header("Content-Type", "application/json")
for resp := range ch {
switch r := resp.(type) {
case api.ProgressResponse:
if r.Status == "success" {
c.JSON(http.StatusOK, r)
return
}
case gin.H:
status, ok := r["status"].(int)
if !ok {
status = http.StatusInternalServerError
}
if errorMsg, ok := r["error"].(string); ok {
c.JSON(status, gin.H{"error": errorMsg})
return
} else {
c.JSON(status, gin.H{"error": "unexpected error format in progress response"})
return
}
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected progress response"})
return
}
}
c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected end of progress response"})
}
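
// streamResponse writes each value received on ch to the client as a line of
// newline-delimited JSON (application/x-ndjson).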
func streamResponse(c *gin.Context, ch chan any) {
c.Header("Content-Type", "application/x-ndjson")
c.Stream(func(w io.Writer) bool {
val, ok := <-ch
if !ok {
return false
}
bts, err := json.Marshal(val)
if err != nil {
slog.Info(fmt.Sprintf("streamResponse: json.Marshal failed with %s", err))
return false
}
// Delineate chunks with new-line delimiter
bts = append(bts, '\n')
if _, err := w.Write(bts); err != nil {
slog.Info(fmt.Sprintf("streamResponse: w.Write failed with %s", err))
return false
}
return true
})
}
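
// PsHandler handles GET /api/ps, listing currently loaded models along with
// their memory usage and expiration times.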
func (s *Server) PsHandler(c *gin.Context) {
models := []api.ProcessModelResponse{}
for _, v := range s.sched.loaded {
model := v.model
modelDetails := api.ModelDetails{
Format: model.Config.ModelFormat,
Family: model.Config.ModelFamily,
Families: model.Config.ModelFamilies,
ParameterSize: model.Config.ModelType,
QuantizationLevel: model.Config.FileType,
}
mr := api.ProcessModelResponse{
Model: model.ShortName,
Name: model.ShortName,
Size: int64(v.estimatedTotal),
SizeVRAM: int64(v.estimatedVRAM),
Digest: model.Digest,
Details: modelDetails,
ExpiresAt: v.expiresAt,
}
// The scheduler waits to set expiresAt, so if a model is loading it's
// possible that it will be set to the unix epoch. For those cases, just
// calculate the time w/ the sessionDuration instead.
var epoch time.Time
if v.expiresAt == epoch {
mr.ExpiresAt = time.Now().Add(v.sessionDuration)
}
models = append(models, mr)
}
slices.SortStableFunc(models, func(i, j api.ProcessModelResponse) int {
// longest duration remaining listed first
return cmp.Compare(j.ExpiresAt.Unix(), i.ExpiresAt.Unix())
})
c.JSON(http.StatusOK, api.ProcessResponse{Models: models})
}
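
// ChatHandler handles POST /api/chat. It schedules a runner for the model,
// builds the chat prompt from the model's stored messages plus the request
// messages (inserting the model's system prompt when the conversation does not
// start with one), and streams responses. When tools are supplied and streaming
// is disabled, tool calls are parsed out of the final message. An empty message
// list loads the model, and an empty list with keep_alive 0 unloads it.
//
// Illustrative request body (field names follow api.ChatRequest; the model name
// is an example):
//
//	{"model": "llama3.1", "messages": [{"role": "user", "content": "Hello"}]}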
func (s *Server) ChatHandler(c *gin.Context) {
checkpointStart := time.Now()
var req api.ChatRequest
if err := c.ShouldBindJSON(&req); errors.Is(err, io.EOF) {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "missing request body"})
return
} else if err != nil {
c.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
// expire the runner
if len(req.Messages) == 0 && req.KeepAlive != nil && int(req.KeepAlive.Seconds()) == 0 {
model, err := GetModel(req.Model)
if err != nil {
switch {
case os.IsNotExist(err):
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model '%s' not found", req.Model)})
case err.Error() == "invalid model name":
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
}
return
}
s.sched.expireRunner(model)
c.JSON(http.StatusOK, api.ChatResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Message: api.Message{Role: "assistant"},
Done: true,
DoneReason: "unload",
})
return
}
caps := []Capability{CapabilityCompletion}
if len(req.Tools) > 0 {
caps = append(caps, CapabilityTools)
}
r, m, opts, err := s.scheduleRunner(c.Request.Context(), req.Model, caps, req.Options, req.KeepAlive)
if errors.Is(err, errCapabilityCompletion) {
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("%q does not support chat", req.Model)})
return
} else if err != nil {
handleScheduleError(c, req.Model, err)
return
}
checkpointLoaded := time.Now()
if len(req.Messages) == 0 {
c.JSON(http.StatusOK, api.ChatResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Message: api.Message{Role: "assistant"},
Done: true,
DoneReason: "load",
})
return
}
msgs := append(m.Messages, req.Messages...)
if req.Messages[0].Role != "system" && m.System != "" {
msgs = append([]api.Message{{Role: "system", Content: m.System}}, msgs...)
}
prompt, images, err := chatPrompt(c.Request.Context(), m, r.Tokenize, opts, msgs, req.Tools)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
slog.Debug("chat request", "images", len(images), "prompt", prompt)
ch := make(chan any)
go func() {
defer close(ch)
if err := r.Completion(c.Request.Context(), llm.CompletionRequest{
Prompt: prompt,
Images: images,
Format: req.Format,
Options: opts,
}, func(r llm.CompletionResponse) {
res := api.ChatResponse{
Model: req.Model,
CreatedAt: time.Now().UTC(),
Message: api.Message{Role: "assistant", Content: r.Content},
Done: r.Done,
DoneReason: r.DoneReason,
Metrics: api.Metrics{
PromptEvalCount: r.PromptEvalCount,
PromptEvalDuration: r.PromptEvalDuration,
EvalCount: r.EvalCount,
EvalDuration: r.EvalDuration,
},
}
if r.Done {
res.TotalDuration = time.Since(checkpointStart)
res.LoadDuration = checkpointLoaded.Sub(checkpointStart)
}
ch <- res
}); err != nil {
ch <- gin.H{"error": err.Error()}
}
}()
if req.Stream != nil && !*req.Stream {
var resp api.ChatResponse
var sb strings.Builder
for rr := range ch {
switch t := rr.(type) {
case api.ChatResponse:
sb.WriteString(t.Message.Content)
resp = t
case gin.H:
msg, ok := t["error"].(string)
if !ok {
msg = "unexpected error format in response"
}
c.JSON(http.StatusInternalServerError, gin.H{"error": msg})
return
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": "unexpected response"})
return
}
}
resp.Message.Content = sb.String()
if len(req.Tools) > 0 {
if toolCalls, ok := m.parseToolCalls(sb.String()); ok {
resp.Message.ToolCalls = toolCalls
resp.Message.Content = ""
}
}
c.JSON(http.StatusOK, resp)
return
}
streamResponse(c, ch)
}
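
// handleScheduleError maps scheduling failures to HTTP status codes: 400 when
// the model name is missing or a capability is unsupported, 499 for canceled
// requests, 503 when the queue is full, 404 when the model is not found, and
// 500 otherwise.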
func handleScheduleError(c *gin.Context, name string, err error) {
switch {
case errors.Is(err, errCapabilities), errors.Is(err, errRequired):
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
case errors.Is(err, context.Canceled):
c.JSON(499, gin.H{"error": "request canceled"})
case errors.Is(err, ErrMaxQueue):
c.JSON(http.StatusServiceUnavailable, gin.H{"error": err.Error()})
case errors.Is(err, os.ErrNotExist):
c.JSON(http.StatusNotFound, gin.H{"error": fmt.Sprintf("model %q not found, try pulling it first", name)})
default:
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
}
}