tidy intermediate blobs
commit f36f1d6be9 (parent 8800c8a59b)
3 changed files with 11 additions and 16 deletions
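In short, the tidy-up turns the package-level intermediateBlobs cache (which, as the hunks below suggest, maps the digest of a source blob to the digest of the blob derived from it) from a sync.Map into a plain map[string]string, removing the ib.(string) type assertions and the now-unused "sync" import. As a minimal, self-contained sketch of the access-pattern change only (the sha256 values are placeholders, not digests from this repository):

    package main

    import (
        "fmt"
        "sync"
    )

    func main() {
        // Old shape: sync.Map stores interface{} values, so every read needs a type assertion.
        var old sync.Map
        old.Store("sha256:aaa", "sha256:bbb") // placeholder digests
        if v, ok := old.Load("sha256:aaa"); ok {
            fmt.Println(v.(string)) // the ib.(string) assertions removed by this commit
        }
        old.Delete("sha256:aaa")

        // New shape: a plain map[string]string keeps reads, writes and deletes direct.
        intermediateBlobs := make(map[string]string)
        intermediateBlobs["sha256:aaa"] = "sha256:bbb"
        if ib, ok := intermediateBlobs["sha256:aaa"]; ok {
            fmt.Println(ib)
        }
        delete(intermediateBlobs, "sha256:aaa")
    }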
@@ -341,8 +341,8 @@ func CreateModel(ctx context.Context, name, modelFileDir, quantization string, m
             }
         } else if strings.HasPrefix(c.Args, "@") {
             digest := strings.TrimPrefix(c.Args, "@")
-            if ib, ok := intermediateBlobs.Load(digest); ok {
-                p, err := GetBlobsPath(ib.(string))
+            if ib, ok := intermediateBlobs[digest]; ok {
+                p, err := GetBlobsPath(ib)
                 if err != nil {
                     return err
                 }
@@ -352,8 +352,8 @@ func CreateModel(ctx context.Context, name, modelFileDir, quantization string, m
                 } else if err != nil {
                     return err
                 } else {
-                    fn(api.ProgressResponse{Status: fmt.Sprintf("using cached layer %s", ib.(string))})
-                    digest = ib.(string)
+                    fn(api.ProgressResponse{Status: fmt.Sprintf("using cached layer %s", ib)})
+                    digest = ib
                 }
             }

@@ -415,14 +415,10 @@ func CreateModel(ctx context.Context, name, modelFileDir, quantization string, m
                     return err
                 }

-                f16digest := baseLayer.Layer.Digest
-
                 baseLayer.Layer, err = NewLayer(temp, baseLayer.Layer.MediaType)
                 if err != nil {
                     return err
                 }
-
-                intermediateBlobs.Store(f16digest, baseLayer.Layer.Digest)
             }
         }

@@ -10,7 +10,6 @@ import (
     "net/http"
     "os"
     "path/filepath"
-    "sync"

     "github.com/ollama/ollama/api"
     "github.com/ollama/ollama/convert"
@@ -18,7 +17,7 @@ import (
     "github.com/ollama/ollama/types/model"
 )

-var intermediateBlobs sync.Map
+var intermediateBlobs map[string]string = make(map[string]string)

 type layerWithGGML struct {
     *Layer
@@ -169,7 +168,7 @@ func parseFromZipFile(_ context.Context, file *os.File, digest string, fn func(a

     layer, err := NewLayer(temp, "application/vnd.ollama.image.model")
     if err != nil {
-        return nil, fmt.Errorf("aaa: %w", err)
+        return nil, err
     }

     bin, err := layer.Open()
@@ -185,7 +184,7 @@ func parseFromZipFile(_ context.Context, file *os.File, digest string, fn func(a

     layers = append(layers, &layerWithGGML{layer, ggml})

-    intermediateBlobs.Store(digest, layer.Digest)
+    intermediateBlobs[digest] = layer.Digest
     return layers, nil
 }

@@ -841,16 +841,16 @@ func (s *Server) HeadBlobHandler(c *gin.Context) {
 }

 func (s *Server) CreateBlobHandler(c *gin.Context) {
-    ib, ok := intermediateBlobs.Load(c.Param("digest"))
-    if ok {
-        p, err := GetBlobsPath(ib.(string))
+    if ib, ok := intermediateBlobs[c.Param("digest")]; ok {
+        p, err := GetBlobsPath(ib)
         if err != nil {
             c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
             return
         }

         if _, err := os.Stat(p); errors.Is(err, os.ErrNotExist) {
-            intermediateBlobs.Delete(c.Param("digest"))
+            slog.Info("evicting intermediate blob which no longer exists", "digest", ib)
+            delete(intermediateBlobs, c.Param("digest"))
         } else if err != nil {
             c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
             return
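Besides the map conversion, this hunk changes the eviction path: where the old code silently dropped the cache entry when the referenced blob file was missing, the new code logs the eviction before deleting the key. A self-contained sketch of that check-then-evict shape; evictIfMissing and blobsDir are stand-ins invented here for illustration (GetBlobsPath and the gin handler plumbing are not reproduced):

    package main

    import (
        "errors"
        "log/slog"
        "os"
        "path/filepath"
    )

    var intermediateBlobs = make(map[string]string)

    // evictIfMissing mirrors the shape of the new check: if the cached blob's
    // file no longer exists on disk, log the eviction and drop the map entry.
    func evictIfMissing(blobsDir, digest string) error {
        ib, ok := intermediateBlobs[digest]
        if !ok {
            return nil
        }

        p := filepath.Join(blobsDir, ib)
        if _, err := os.Stat(p); errors.Is(err, os.ErrNotExist) {
            slog.Info("evicting intermediate blob which no longer exists", "digest", ib)
            delete(intermediateBlobs, digest)
        } else if err != nil {
            return err
        }
        return nil
    }

    func main() {
        intermediateBlobs["sha256:aaa"] = "sha256:bbb" // placeholder digests
        _ = evictIfMissing(os.TempDir(), "sha256:aaa") // no such file, so the entry is evicted
        slog.Info("entries left", "count", len(intermediateBlobs))
    }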