2023-07-21 20:33:56 +00:00
|
|
|
package llm
|
|
|
|
|
2024-03-14 17:24:13 +00:00
|
|
|
// #cgo CFLAGS: -Illama.cpp
|
|
|
|
// #cgo darwin,arm64 LDFLAGS: ${SRCDIR}/build/darwin/arm64_static/libllama.a -lstdc++
|
|
|
|
// #cgo darwin,amd64 LDFLAGS: ${SRCDIR}/build/darwin/x86_64_static/libllama.a -lstdc++
|
|
|
|
// #cgo windows,amd64 LDFLAGS: ${SRCDIR}/build/windows/amd64_static/libllama.a -static -lstdc++
|
2024-04-27 04:24:53 +00:00
|
|
|
// #cgo windows,arm64 LDFLAGS: ${SRCDIR}/build/windows/arm64_static/libllama.a -static -lstdc++
|
2024-03-14 17:24:13 +00:00
|
|
|
// #cgo linux,amd64 LDFLAGS: ${SRCDIR}/build/linux/x86_64_static/libllama.a -lstdc++
|
|
|
|
// #cgo linux,arm64 LDFLAGS: ${SRCDIR}/build/linux/arm64_static/libllama.a -lstdc++
|
2024-04-05 15:49:04 +00:00
|
|
|
// #include <stdlib.h>
|
2024-03-14 17:24:13 +00:00
|
|
|
// #include "llama.h"
|
|
|
|
import "C"
|
2024-04-05 15:49:04 +00:00
|
|
|
import (
|
|
|
|
"fmt"
|
2024-05-11 19:49:24 +00:00
|
|
|
"strings"
|
2024-04-05 15:49:04 +00:00
|
|
|
"unsafe"
|
|
|
|
)
|
2024-03-14 17:24:13 +00:00
|
|
|
|
|
|
|
// SystemInfo is an unused example of calling llama.cpp functions using CGo
|
|
|
|
func SystemInfo() string {
|
|
|
|
return C.GoString(C.llama_print_system_info())
|
2023-12-20 18:36:01 +00:00
|
|
|
}
|
2024-04-05 15:49:04 +00:00
|
|
|
|
2024-04-23 22:18:45 +00:00
|
|
|
func Quantize(infile, outfile string, ftype fileType) error {
|
2024-04-05 15:49:04 +00:00
|
|
|
cinfile := C.CString(infile)
|
|
|
|
defer C.free(unsafe.Pointer(cinfile))
|
|
|
|
|
|
|
|
coutfile := C.CString(outfile)
|
|
|
|
defer C.free(unsafe.Pointer(coutfile))
|
|
|
|
|
|
|
|
params := C.llama_model_quantize_default_params()
|
|
|
|
params.nthread = -1
|
2024-04-12 20:55:12 +00:00
|
|
|
params.ftype = ftype.Value()
|
2024-04-05 15:49:04 +00:00
|
|
|
|
2024-04-12 20:55:12 +00:00
|
|
|
if rc := C.llama_model_quantize(cinfile, coutfile, ¶ms); rc != 0 {
|
|
|
|
return fmt.Errorf("llama_model_quantize: %d", rc)
|
2024-04-05 15:49:04 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
2024-05-11 19:49:24 +00:00
|
|
|
|
|
|
|
// llamaModel wraps a llama.cpp model handle loaded through the C API.
type llamaModel struct {
	// m is the C-side model pointer; it is released by Close.
	m *C.struct_llama_model
}
|
|
|
|
|
|
|
|
func newLlamaModel(p string) *llamaModel {
|
|
|
|
cs := C.CString(p)
|
|
|
|
defer C.free(unsafe.Pointer(cs))
|
|
|
|
|
|
|
|
return &llamaModel{
|
|
|
|
C.llama_load_model_from_file(
|
|
|
|
cs,
|
|
|
|
C.llama_model_default_params(),
|
|
|
|
),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Close releases the C-side model memory. The receiver must not be
// used after Close returns.
func (llm *llamaModel) Close() {
	C.llama_free_model(llm.m)
}
|
|
|
|
|
|
|
|
func (llm *llamaModel) Tokenize(s string) []int {
|
|
|
|
cs := C.CString(s)
|
|
|
|
defer C.free(unsafe.Pointer(cs))
|
|
|
|
|
|
|
|
tokens := make([]int, len(s)+2)
|
|
|
|
if n := C.llama_tokenize(llm.m, cs, C.int(len(s)), (*C.llama_token)(unsafe.Pointer(&tokens[0])), C.int(len(s)+2), false, true); n > 0 {
|
|
|
|
return tokens[:n]
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (llm *llamaModel) Detokenize(i32s []int) string {
|
|
|
|
var sb strings.Builder
|
|
|
|
for _, i32 := range i32s {
|
|
|
|
c := make([]byte, 512)
|
|
|
|
if n := C.llama_token_to_piece(llm.m, C.llama_token(i32), (*C.char)(unsafe.Pointer(&c[0])), C.int(len(c)), false); n > 0 {
|
|
|
|
sb.WriteString(unsafe.String(&c[0], n))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return sb.String()
|
|
|
|
}
|