Use server-sent events function for streaming completion

Andrei Betlen 2023-05-19 02:04:30 -04:00
parent f0ec6e615e
commit dc39cc0fa4


@@ -259,8 +259,15 @@ def create_completion(
         )
     )
     if request.stream:
+        async def server_sent_events(
+            chunks: Iterator[llama_cpp.CompletionChunk],
+        ):
+            for chunk in chunks:
+                yield dict(data=json.dumps(chunk))
+
         chunks: Iterator[llama_cpp.CompletionChunk] = completion_or_chunks  # type: ignore
-        return EventSourceResponse(dict(data=json.dumps(chunk)) for chunk in chunks)
+        return EventSourceResponse(server_sent_events(chunks))
     completion: llama_cpp.Completion = completion_or_chunks  # type: ignore
     return completion
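
The streaming branch previously handed a synchronous generator expression straight to EventSourceResponse; this commit wraps the chunk iterator in a named async generator, server_sent_events, so the SSE payloads are produced from an async context. Below is a rough client-side sketch (not part of the commit) for consuming the resulting stream. The host, port, and payload fields are assumptions based on the OpenAI-style /v1/completions endpoint the server exposes, and the chunk schema is assumed to match llama_cpp.CompletionChunk.

# Hedged example: consuming the streamed completion as server-sent events.
# Assumes the server is running locally on port 8000 and that each SSE
# "data:" line carries one JSON-encoded CompletionChunk.
import json

import requests

response = requests.post(
    "http://localhost:8000/v1/completions",
    json={"prompt": "Hello, my name is", "max_tokens": 64, "stream": True},
    stream=True,
)
response.raise_for_status()

for line in response.iter_lines():
    if not line:
        continue  # SSE events are separated by blank lines
    decoded = line.decode("utf-8")
    if decoded.startswith("data: "):
        chunk = json.loads(decoded[len("data: "):])
        # Assumed chunk shape: OpenAI-style completion chunks, with the
        # generated text under choices[0]["text"].
        print(chunk["choices"][0]["text"], end="", flush=True)
print()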