llama_cpp server: prompt is a string

Not sure why this union type was here, but looking at llama.py, the prompt is only ever processed as a string for completion.

This was breaking types when generating an OpenAPI client (a short sketch after the diff below illustrates the difference in the generated schema).
Lucas Doyle 2023-05-02 14:08:51 -07:00
parent 7ab08b8d10
commit b9098b0ef7


@@ -126,7 +126,7 @@ repeat_penalty_field = Field(
 )
 
 class CreateCompletionRequest(BaseModel):
-    prompt: Union[str, List[str]] = Field(
+    prompt: Optional[str] = Field(
         default="",
         description="The prompt to generate completions for."
     )
@@ -175,9 +175,6 @@ CreateCompletionResponse = create_model_from_typeddict(llama_cpp.Completion)
 def create_completion(
     request: CreateCompletionRequest, llama: llama_cpp.Llama = Depends(get_llama)
 ):
-    if isinstance(request.prompt, list):
-        request.prompt = "".join(request.prompt)
-
     completion_or_chunks = llama(
         **request.dict(
             exclude={
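
For context, a minimal sketch of why the union type was a problem for generated clients, assuming Pydantic v1 (as FastAPI used at the time); the OldRequest/NewRequest model names are hypothetical and only contrast the old and new field types. Pydantic renders Union[str, List[str]] as an anyOf schema, while a plain string maps to a single type:

# Hypothetical models contrasting the old and new "prompt" field types.
from typing import List, Optional, Union

from pydantic import BaseModel, Field


class OldRequest(BaseModel):
    prompt: Union[str, List[str]] = Field(
        default="", description="The prompt to generate completions for."
    )


class NewRequest(BaseModel):
    prompt: Optional[str] = Field(
        default="", description="The prompt to generate completions for."
    )


# Pydantic v1's .schema() mirrors what FastAPI emits into openapi.json.
print(OldRequest.schema()["properties"]["prompt"])
# roughly: {'title': 'Prompt', 'description': '...', 'default': '',
#           'anyOf': [{'type': 'string'},
#                     {'type': 'array', 'items': {'type': 'string'}}]}
print(NewRequest.schema()["properties"]["prompt"])
# roughly: {'title': 'Prompt', 'description': '...', 'default': '',
#           'type': 'string'}

Client generators map the plain string field to a single string parameter, whereas the anyOf variant often produces awkward wrapper types, or fails outright, depending on the generator.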