fix gemma, command-r layer weights
This commit is contained in:
parent
7fea1ecdf6
commit
f81f308118
1 changed file with 8 additions and 4 deletions
|
@@ -102,10 +102,14 @@ func EstimateGPULayers(gpus []gpu.GpuInfo, ggml *GGML, projectors []string, opts
 	layers := ggml.Tensors().Layers()
 
 	var memoryLayerOutput uint64
-	for k, v := range layers {
-		if k == "output" || k == "output_norm" {
-			memoryLayerOutput += v.size()
-		}
+	if layer, ok := layers["output_norm"]; ok {
+		memoryLayerOutput += layer.size()
+	}
+
+	if layer, ok := layers["output"]; ok {
+		memoryLayerOutput += layer.size()
+	} else if layer, ok := layers["token_embd"]; ok {
+		memoryLayerOutput += layer.size()
 	}
 
 	if gpus[0].Library == "metal" && opts.UseMMap {
Loading…
Reference in a new issue