fix: pass chat handler, not chat formatter, for huggingface autotokenizer and tokenizer_config formats.
parent 7f3209b1eb
commit 24f39454e9
1 changed file with 2 additions and 2 deletions
@@ -78,7 +78,7 @@ class LlamaProxy:
                 settings.hf_pretrained_model_name_or_path is not None
             ), "hf_pretrained_model_name_or_path must be set for hf-autotokenizer"
             chat_handler = (
-                llama_cpp.llama_chat_format.hf_autotokenizer_to_chat_formatter(
+                llama_cpp.llama_chat_format.hf_autotokenizer_to_chat_completion_handler(
                     settings.hf_pretrained_model_name_or_path
                 )
             )
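
For context: a chat formatter only renders a message list into a prompt string, whereas the chat completion handler returned by hf_autotokenizer_to_chat_completion_handler is the callable that Llama invokes to produce a full chat completion, so passing the formatter here would break chat requests. The sketch below shows how the corrected handler can be used directly; the Hugging Face repo id and model path are illustrative placeholders, not values from this commit, and the autotokenizer path requires the optional transformers dependency.

import llama_cpp
import llama_cpp.llama_chat_format

# Build a chat completion handler backed by the model's own Hugging Face
# tokenizer and chat template (repo id below is a hypothetical example;
# requires the `transformers` package to be installed).
chat_handler = llama_cpp.llama_chat_format.hf_autotokenizer_to_chat_completion_handler(
    "meta-llama/Llama-2-7b-chat-hf"
)

# Llama expects the completion handler here, not a prompt formatter.
llm = llama_cpp.Llama(
    model_path="./models/llama-2-7b-chat.Q4_K_M.gguf",  # hypothetical local path
    chat_handler=chat_handler,
)

print(llm.create_chat_completion(
    messages=[{"role": "user", "content": "Hello!"}],
    max_tokens=32,
))
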
@@ -87,7 +87,7 @@ class LlamaProxy:
                 settings.hf_tokenizer_config_path is not None
             ), "hf_tokenizer_config_path must be set for hf-tokenizer-config"
             chat_handler = (
-                llama_cpp.llama_chat_format.hf_tokenizer_config_to_chat_formatter(
+                llama_cpp.llama_chat_format.hf_tokenizer_config_to_chat_completion_handler(
                     json.load(open(settings.hf_tokenizer_config_path))
                 )
             )
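
The hf-tokenizer-config branch gets the same treatment, but builds the handler from a local tokenizer_config.json (which carries the chat_template) instead of downloading a tokenizer. A minimal standalone sketch of the equivalent call, with a placeholder path:

import json
import llama_cpp.llama_chat_format

# Parse the HF tokenizer_config.json (path is a hypothetical placeholder)
# and wrap its chat template in a chat completion handler, mirroring the
# server-side call after this fix.
with open("./models/tokenizer_config.json") as f:
    tokenizer_config = json.load(f)

chat_handler = (
    llama_cpp.llama_chat_format.hf_tokenizer_config_to_chat_completion_handler(
        tokenizer_config
    )
)

As in the first sketch, the resulting chat_handler is then passed to llama_cpp.Llama via its chat_handler argument.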