Revert "llama_cpp server: prompt is a string". Closes #187

This reverts commit b9098b0ef7.
Andrei Betlen 2023-05-12 07:16:57 -04:00
parent 684d7c8c17
commit 8895b9002a


@@ -167,8 +167,9 @@ frequency_penalty_field = Field(
 )
 
 class CreateCompletionRequest(BaseModel):
-    prompt: Optional[str] = Field(
-        default="", description="The prompt to generate completions for."
+    prompt: Union[str, List[str]] = Field(
+        default="",
+        description="The prompt to generate completions for."
     )
     suffix: Optional[str] = Field(
         default=None,
@@ -222,6 +223,9 @@ CreateCompletionResponse = create_model_from_typeddict(llama_cpp.Completion)
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
+    if isinstance(request.prompt, list):
+        request.prompt = "".join(request.prompt)
+
     completion_or_chunks = llama(
         **request.dict(
             exclude={
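After the revert, the completions endpoint once again accepts `prompt` either as a single string or as a list of strings, and a list is concatenated into one string before it reaches the model. Below is a minimal, self-contained sketch of that coercion using the same field definition as the diff; `CompletionRequest` and `normalize_prompt` are illustrative stand-ins, not the server's actual `CreateCompletionRequest` model or request handler.

```python
from typing import List, Union

from pydantic import BaseModel, Field


class CompletionRequest(BaseModel):
    # Mirrors the restored field: a plain string or a list of string segments.
    prompt: Union[str, List[str]] = Field(
        default="", description="The prompt to generate completions for."
    )


def normalize_prompt(request: CompletionRequest) -> str:
    # Same coercion as the handler in the diff: join list segments into a
    # single string before the prompt is passed to the model.
    if isinstance(request.prompt, list):
        return "".join(request.prompt)
    return request.prompt


# Both forms yield the same prompt string.
assert normalize_prompt(CompletionRequest(prompt="Hello world")) == "Hello world"
assert normalize_prompt(CompletionRequest(prompt=["Hello ", "world"])) == "Hello world"
```

Note that the segments are joined with an empty separator, matching the reverted-to behavior, so callers that split a prompt into a list must include any desired whitespace inside the segments themselves.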