Use server sent events function for streaming completion
This commit is contained in:
parent
f0ec6e615e
commit
dc39cc0fa4
1 changed file with 8 additions and 1 deletion
@@ -259,8 +259,15 @@ def create_completion(
             )
         )
     if request.stream:
+
+        async def server_sent_events(
+            chunks: Iterator[llama_cpp.CompletionChunk],
+        ):
+            for chunk in chunks:
+                yield dict(data=json.dumps(chunk))
+
         chunks: Iterator[llama_cpp.CompletionChunk] = completion_or_chunks  # type: ignore
-        return EventSourceResponse(dict(data=json.dumps(chunk)) for chunk in chunks)
+        return EventSourceResponse(server_sent_events(chunks))
     completion: llama_cpp.Completion = completion_or_chunks  # type: ignore
     return completion
 
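For context, the pattern this commit lands on can be shown as a small standalone sketch. This is a hypothetical, minimal example and not the project's actual handler: fake_chunks() and the bare /v1/completions route stand in for llama_cpp's streaming CompletionChunk iterator and the real request handling. It only illustrates what the new server_sent_events function does, namely wrapping a chunk iterator in a named async generator and passing it to sse_starlette's EventSourceResponse instead of an inline generator expression.

# Minimal sketch of the streaming pattern, assuming FastAPI and sse_starlette.
# fake_chunks() is a hypothetical stand-in for the CompletionChunk iterator
# that the llama(...) call returns when stream=True.
import json
from typing import Iterator

from fastapi import FastAPI
from sse_starlette.sse import EventSourceResponse

app = FastAPI()


def fake_chunks() -> Iterator[dict]:
    # Yield a few chunk-shaped dicts, standing in for llama_cpp.CompletionChunk.
    for token in ["Hello", ",", " world"]:
        yield {"choices": [{"text": token}]}


@app.post("/v1/completions")
def create_completion():
    chunks = fake_chunks()

    # Named async generator, as in the commit: each chunk becomes an SSE event
    # dict, and EventSourceResponse writes each {"data": ...} item out as a
    # "data: ..." line on the wire.
    async def server_sent_events(chunks: Iterator[dict]):
        for chunk in chunks:
            yield dict(data=json.dumps(chunk))

    return EventSourceResponse(server_sent_events(chunks))

Hitting such an endpoint with curl -N (to disable buffering) would print one data: line per chunk as it is produced, which is how streaming clients consume the completion.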