From ef65fc5ff4ca35f88fb86fc636ec5659f990fe1a Mon Sep 17 00:00:00 2001
From: mrfakename
Date: Mon, 20 Nov 2023 21:19:25 -0800
Subject: [PATCH] Add MistralLite, Intel, and OpenChat prompt formats (#927)

* Add MistralLite format

* Update llama_chat_format.py

* Update llama_chat_format.py
---
 llama_cpp/llama_chat_format.py | 43 ++++++++++++++++++++++++++++++++++
 1 file changed, 43 insertions(+)

diff --git a/llama_cpp/llama_chat_format.py b/llama_cpp/llama_chat_format.py
index efab0b0..a855305 100644
--- a/llama_cpp/llama_chat_format.py
+++ b/llama_cpp/llama_chat_format.py
@@ -529,6 +529,19 @@ def format_phind(
     _prompt = _format_add_colon_single(_system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt)
 
+@register_chat_format("intel")
+def format_intel(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _roles = dict(user="### User:", assistant="### Assistant:")
+    _sep = "\n"
+    _system_message = "### System:\n{system_message}"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_add_colon_single(_system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
 
 @register_chat_format("open-orca")
 def format_open_orca(
@@ -557,6 +570,21 @@
     return ChatFormatterResponse(prompt=_prompt, stop=stop_str)
 
+@register_chat_format("mistrallite")
+def format_mistrallite(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    _roles = dict(user="<|prompter|>", assistant="\n<|assistant|>")
+    _sep = " "
+    system_template = """<|system|>{system_message}"""
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_no_colon_single(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt)
+
 @register_chat_format("chatml")
 def format_chatml(
     messages: List[llama_types.ChatCompletionRequestMessage],
     **kwargs: Any,
 ) -> ChatFormatterResponse:
@@ -573,6 +601,21 @@ def format_chatml(
     _prompt = _format_chatml(system_message, _messages, _sep)
     return ChatFormatterResponse(prompt=_prompt, stop=_sep)
 
+@register_chat_format("openchat")
+def format_openchat(
+    messages: List[llama_types.ChatCompletionRequestMessage],
+    **kwargs: Any,
+) -> ChatFormatterResponse:
+    system_template = "{system_message}<|end_of_turn|>"
+    system_message = _get_system_message(messages)
+    system_message = system_template.format(system_message=system_message)
+    _roles = dict(user="GPT4 Correct User: ", assistant="<|end_of_turn|>GPT4 Correct Assistant: ")
+    _sep = "<|end_of_turn|>"
+    _messages = _map_roles(messages, _roles)
+    _messages.append((_roles["assistant"], None))
+    _prompt = _format_chatml(system_message, _messages, _sep)
+    return ChatFormatterResponse(prompt=_prompt, stop=_sep)
+
 
 @register_chat_completion_handler("functionary")
 def functionary_chat_handler(