clean up convert tokenizer

Michael Yang 2024-08-27 10:45:39 -07:00
parent 3eb08377f8
commit eae3af6807
2 changed files with 2 additions and 3 deletions


@@ -89,7 +89,7 @@ func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }
 
-func TestConvertFull(t *testing.T) {
+func TestConvertModel(t *testing.T) {
 	cases := []string{
 		"Meta-Llama-3-8B-Instruct",
 		"Meta-Llama-3.1-8B-Instruct",


@@ -154,7 +154,6 @@ func parseTokenizer(fsys fs.FS, specialTokenTypes []string) (*Tokenizer, error)
 }
 
 type tokenizer struct {
-	Version     string  `json:"version"`
 	AddedTokens []token `json:"added_tokens"`
 	Model       struct {
 		Type string `json:"type"`
@@ -252,7 +251,7 @@ func parseVocabulary(fsys fs.FS) (*Vocabulary, error) {
 		return pattern.Func(fsys)
 	}
 
-	return nil, errors.New("unknown tensor format")
+	return nil, errors.New("unknown tokenizer format")
 }
 
 type SpecialVocabulary struct {
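
For context, parseTokenizer (named in the second hunk header) decodes a Hugging Face-style tokenizer.json into the tokenizer struct shown above; this commit drops the apparently unused Version field and corrects the error message in parseVocabulary. Below is a minimal, standalone sketch of that kind of decode, assuming a hypothetical tokenizer.json path and only the fields visible in the hunk; the repository's real struct and its token type carry more fields.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Minimal mirror of the tokenizer struct from the diff. The added-token
// field names ("id", "content") are assumptions based on the usual
// tokenizer.json layout, not copied from the repository.
type tokenizer struct {
	AddedTokens []struct {
		ID      int    `json:"id"`
		Content string `json:"content"`
	} `json:"added_tokens"`
	Model struct {
		Type string `json:"type"`
	} `json:"model"`
}

func main() {
	f, err := os.Open("tokenizer.json") // hypothetical local file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	var t tokenizer
	if err := json.NewDecoder(f).Decode(&t); err != nil {
		panic(err)
	}

	fmt.Printf("model type %q, %d added tokens\n", t.Model.Type, len(t.AddedTokens))
}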