From dbbfc4ba2f8460e130dc268096f5906d3d22347b Mon Sep 17 00:00:00 2001
From: Lucas Doyle
Date: Mon, 1 May 2023 11:48:37 -0700
Subject: [PATCH] llama_cpp server: fix to ChatCompletionRequestMessage

When I generate a client, it breaks because it fails to process the schema of
ChatCompletionRequestMessage. These changes fix that:

- `Union[Literal["user"], Literal["channel"], ...]` is equivalent to
  `Literal["user", "channel", ...]`
- It turns out the default value `Literal["user"]` isn't JSON serializable, so
  it is replaced with the plain string "user"
---
 llama_cpp/llama_types.py | 2 +-
 llama_cpp/server/app.py  | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/llama_cpp/llama_types.py b/llama_cpp/llama_types.py
index b8bdb08..b770a01 100644
--- a/llama_cpp/llama_types.py
+++ b/llama_cpp/llama_types.py
@@ -58,7 +58,7 @@ class Completion(TypedDict):
 
 class ChatCompletionMessage(TypedDict):
-    role: Union[Literal["assistant"], Literal["user"], Literal["system"]]
+    role: Literal["assistant", "user", "system"]
     content: str
 
 
 class ChatCompletionChoice(TypedDict):
diff --git a/llama_cpp/server/app.py b/llama_cpp/server/app.py
index 9adddcd..886ee6d 100644
--- a/llama_cpp/server/app.py
+++ b/llama_cpp/server/app.py
@@ -215,8 +215,8 @@ def create_embedding(
 
 
 class ChatCompletionRequestMessage(BaseModel):
-    role: Union[Literal["system"], Literal["user"], Literal["assistant"]] = Field(
-        default=Literal["user"], description="The role of the message."
+    role: Literal["system", "user", "assistant"] = Field(
+        default="user", description="The role of the message."
     )
     content: str = Field(default="", description="The content of the message.")
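
A minimal sketch (not part of the commit) of why the change matters, assuming pydantic v1 as used by `llama_cpp/server/app.py` at the time: with the corrected field, the model's schema can be serialized for a client generator, and the plain-string default behaves normally.

```python
# Sketch only: exercises the corrected ChatCompletionRequestMessage definition.
# Assumes Python 3.8+ and pydantic v1 (the version the server used at the time).
import json
from typing import Literal

from pydantic import BaseModel, Field


class ChatCompletionRequestMessage(BaseModel):
    role: Literal["system", "user", "assistant"] = Field(
        default="user", description="The role of the message."
    )
    content: str = Field(default="", description="The content of the message.")


# The plain-string default is applied as expected.
assert ChatCompletionRequestMessage(content="hi").role == "user"

# The generated schema now round-trips through json.dumps, which is what a
# client generator needs; with default=Literal["user"] the default was a
# typing construct that the standard JSON encoder could not handle.
print(json.dumps(ChatCompletionRequestMessage.schema(), indent=2))
```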