diff --git a/llama_cpp/llama_types.py b/llama_cpp/llama_types.py
index b8bdb08..b770a01 100644
--- a/llama_cpp/llama_types.py
+++ b/llama_cpp/llama_types.py
@@ -58,7 +58,7 @@ class Completion(TypedDict):
 
 
 class ChatCompletionMessage(TypedDict):
-    role: Union[Literal["assistant"], Literal["user"], Literal["system"]]
+    role: Literal["assistant", "user", "system"]
     content: str
 
 class ChatCompletionChoice(TypedDict):
diff --git a/llama_cpp/server/app.py b/llama_cpp/server/app.py
index 9adddcd..886ee6d 100644
--- a/llama_cpp/server/app.py
+++ b/llama_cpp/server/app.py
@@ -215,8 +215,8 @@ def create_embedding(
 
 
 class ChatCompletionRequestMessage(BaseModel):
-    role: Union[Literal["system"], Literal["user"], Literal["assistant"]] = Field(
-        default=Literal["user"], description="The role of the message."
+    role: Literal["system", "user", "assistant"] = Field(
+        default="user", description="The role of the message."
     )
     content: str = Field(default="", description="The content of the message.")
 
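
A minimal standalone sketch (not part of the patch) of what the change means in practice, assuming a pydantic BaseModel/Field setup like the one in llama_cpp/server/app.py: the multi-value Literal is PEP 586's shorthand for the nested Union of single Literals, and the Field default is now the plain string "user" rather than the typing construct Literal["user"].

# sketch.py -- illustrative only; names mirror the patched model in app.py
from typing import Literal

from pydantic import BaseModel, Field


class ChatCompletionRequestMessage(BaseModel):
    # Literal["system", "user", "assistant"] is equivalent to
    # Union[Literal["system"], Literal["user"], Literal["assistant"]],
    # just without the nesting.
    role: Literal["system", "user", "assistant"] = Field(
        default="user", description="The role of the message."
    )
    content: str = Field(default="", description="The content of the message.")


# With the fix, the default is a real string value; previously the model's
# default was the typing object Literal["user"] rather than "user".
print(ChatCompletionRequestMessage().role)           # -> user
print(ChatCompletionRequestMessage(role="system"))   # accepted by validation
# ChatCompletionRequestMessage(role="bot")           # would raise ValidationError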