Revert "llama_cpp server: prompt is a string". Closes #187
This reverts commit b9098b0ef7.
parent 684d7c8c17
commit 8895b9002a
1 changed file with 6 additions and 2 deletions
@@ -167,8 +167,9 @@ frequency_penalty_field = Field(
 )

 class CreateCompletionRequest(BaseModel):
-    prompt: Optional[str] = Field(
-        default="", description="The prompt to generate completions for."
+    prompt: Union[str, List[str]] = Field(
+        default="",
+        description="The prompt to generate completions for."
     )
     suffix: Optional[str] = Field(
         default=None,
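The first hunk restores the broader request type: prompt once again accepts either a single string or a list of strings. A minimal, standalone sketch of the restored field (PromptOnlyRequest is a hypothetical stand-in for the server's CreateCompletionRequest, assuming ordinary Pydantic validation):

from typing import List, Union

from pydantic import BaseModel, Field


class PromptOnlyRequest(BaseModel):
    # Either form validates: a plain string or a list of strings.
    prompt: Union[str, List[str]] = Field(
        default="", description="The prompt to generate completions for."
    )


print(PromptOnlyRequest(prompt="Hello").prompt)               # 'Hello'
print(PromptOnlyRequest(prompt=["Hello, ", "world"]).prompt)  # ['Hello, ', 'world']

The second hunk then restores the handling of list prompts inside the request handler: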
@@ -222,6 +223,9 @@ CreateCompletionResponse = create_model_from_typeddict(llama_cpp.Completion)
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
+    if isinstance(request.prompt, list):
+        request.prompt = "".join(request.prompt)
+
     completion_or_chunks = llama(
         **request.dict(
             exclude={
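Taken together, a client can again post the prompt as a list of strings and the server flattens it before generation. A sketch of that behaviour in isolation (join_prompt is a hypothetical helper, not part of the server code; it mirrors the "".join(...) added back above):

from typing import List, Union


def join_prompt(prompt: Union[str, List[str]]) -> str:
    # Mirror the handler: a list of prompt segments is concatenated
    # with no separator before being passed to the llama callable.
    if isinstance(prompt, list):
        return "".join(prompt)
    return prompt


assert join_prompt(["Hello, ", "world"]) == "Hello, world"
assert join_prompt("Hello") == "Hello"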