From 4184835078f3d9910e76b734b15814b1be389c46 Mon Sep 17 00:00:00 2001
From: caiyesd
Date: Thu, 23 Nov 2023 14:19:50 +0800
Subject: [PATCH 1/2] Add chat format to support baichuan (#938)

Signed-off-by: caiyesd
---
 llama_cpp/llama_chat_format.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index 7248516..b3e53be 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -471,6 +471,23 @@ def format_baichuan2(
     _prompt = _format_no_colon_single(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+
+@register_chat_format("baichuan")
+def format_baichuan(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _system_template = "{system_message}"
+    _roles = dict(user="<reserved_102>", assistant="<reserved_103>")
+    _sep = ""
+    system_message = _get_system_message(messages)
+    system_message = _system_template.format(system_message=system_message)
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_no_colon_single(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
+
 @register_chat_format("openbuddy")
 def format_openbuddy(
     messages: List[llama_types.ChatCompletionRequestMessage],

From d68fc07b1bb81c83d82ece7d82dadd12244181ba Mon Sep 17 00:00:00 2001
From: mrfakename
Date: Wed, 22 Nov 2023 22:20:08 -0800
Subject: [PATCH 2/2] Add Zephyr format (#937)

---
 llama_cpp/llama_chat_format.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index b3e53be..83eee9a 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -621,6 +621,21 @@ def format_mistrallite(
     _prompt = _format_no_colon_single(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+@register_chat_format("zephyr")
+def format_zephyr(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    system_template = """<|system|>
+{system_message}"""
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _roles = dict(user="<|user|>\n", assistant="<|assistant|>\n")
+    _sep = "</s>"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
 @register_chat_format("chatml")
 def format_chatml(
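
Usage sketch (not part of the patches above): formats registered with @register_chat_format are selected by name when constructing a Llama instance from the high-level llama-cpp-python API. This is a minimal example, assuming a locally available GGUF model; the model filename is a placeholder.

    from llama_cpp import Llama

    # Pick the newly registered format by its registered name
    # ("baichuan" or "zephyr"); the formatter builds the prompt and
    # stop sequence before the completion call.
    llm = Llama(
        model_path="./zephyr-7b-beta.Q4_K_M.gguf",  # placeholder path
        chat_format="zephyr",
    )

    response = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Name the planets of the solar system."},
        ],
    )
    print(response["choices"][0]["message"]["content"])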