@register_chat_format("baichuan-2")
def format_baichuan2(
    messages: List[llama_types.ChatCompletionRequestMessage],
    **kwargs: Any,
) -> ChatFormatterResponse:
    """Render *messages* into a Baichuan-2 chat prompt.

    Baichuan-2 marks conversation turns with reserved special tokens from
    its tokenizer vocabulary: ``<reserved_106>`` (id 195) prefixes user
    turns and ``<reserved_107>`` (id 196) prefixes assistant turns. The
    system message, if any, is emitted verbatim before the first turn,
    and turns are joined with no separator.

    Parameters
    ----------
    messages:
        OpenAI-style chat messages (dicts with ``role``/``content``).
    **kwargs:
        Ignored; accepted for signature compatibility with other
        registered chat formatters.

    Returns
    -------
    ChatFormatterResponse
        Carries the fully rendered prompt string, ending with the
        assistant role token so generation continues as the assistant.
    """
    _system_template = "{system_message}"
    # Role markers are the model's reserved special tokens; without them
    # the turns would run together and the model could not tell speakers
    # apart. (They had been stripped to "" — likely by an HTML filter.)
    _roles = dict(user="<reserved_106>", assistant="<reserved_107>")
    _sep = ""
    system_message = _get_system_message(messages)
    system_message = _system_template.format(system_message=system_message)
    _messages = _map_roles(messages, _roles)
    # Open an empty assistant turn so the model generates the reply.
    _messages.append((_roles["assistant"], None))
    _prompt = _format_no_colon_single(system_message, _messages, _sep)
    return ChatFormatterResponse(prompt=_prompt)