Remove async from function signature to avoid blocking the server

This commit is contained in:
Andrei Betlen 2023-04-11 11:54:31 -04:00
parent 3727ba4d9e
commit 213cc5c340

View file

@@ -196,7 +196,7 @@ CreateChatCompletionResponse = create_model_from_typeddict(llama_cpp.ChatComplet
"/v1/chat/completions",
response_model=CreateChatCompletionResponse,
)
-async def create_chat_completion(
+def create_chat_completion(
request: CreateChatCompletionRequest,
) -> Union[llama_cpp.ChatCompletion, EventSourceResponse]:
completion_or_chunks = llama.create_chat_completion(