fix gemma, command-r layer weights

Michael Yang 2024-04-26 15:00:54 -07:00
parent 7fea1ecdf6
commit f81f308118

@@ -102,10 +102,14 @@ func EstimateGPULayers(gpus []gpu.GpuInfo, ggml *GGML, projectors []string, opts
 	layers := ggml.Tensors().Layers()
 
 	var memoryLayerOutput uint64
-	for k, v := range layers {
-		if k == "output" || k == "output_norm" {
-			memoryLayerOutput += v.size()
-		}
+	if layer, ok := layers["output_norm"]; ok {
+		memoryLayerOutput += layer.size()
+	}
+
+	if layer, ok := layers["output"]; ok {
+		memoryLayerOutput += layer.size()
+	} else if layer, ok := layers["token_embd"]; ok {
+		memoryLayerOutput += layer.size()
 	}
 
 	if gpus[0].Library == "metal" && opts.UseMMap {
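For context, the reason for the "token_embd" fallback: models such as Gemma and Command-R tie their token-embedding and output weights, so their GGUF files carry no separate "output" tensor, and the old loop under-counted the memory needed for the output layer. Below is a minimal, self-contained sketch of the new accounting; the Layer type, the outputLayerSize helper, and the example sizes are invented for illustration, while the tensor names "output_norm", "output", and "token_embd" come from the diff above.

package main

import "fmt"

// Layer stands in for the grouped-tensor value returned by
// ggml.Tensors().Layers(); only its size in bytes matters here.
type Layer struct{ bytes uint64 }

func (l Layer) size() uint64 { return l.bytes }

// outputLayerSize mirrors the updated logic: output_norm is always counted,
// and when no separate "output" tensor exists (tied embeddings, as in Gemma
// and Command-R), the "token_embd" tensor is counted in its place.
func outputLayerSize(layers map[string]Layer) uint64 {
	var memoryLayerOutput uint64
	if layer, ok := layers["output_norm"]; ok {
		memoryLayerOutput += layer.size()
	}

	if layer, ok := layers["output"]; ok {
		memoryLayerOutput += layer.size()
	} else if layer, ok := layers["token_embd"]; ok {
		memoryLayerOutput += layer.size()
	}

	return memoryLayerOutput
}

func main() {
	// A tied-embedding model: no "output" entry, so token_embd is counted.
	tied := map[string]Layer{
		"output_norm": {bytes: 1 << 20},
		"token_embd":  {bytes: 512 << 20},
	}
	fmt.Println(outputLayerSize(tied)) // 537919488 (output_norm + token_embd)
}

Checking "output" first keeps the estimate unchanged for models that do ship a separate output tensor; only tied-embedding models take the fallback path.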