llama_cpp server: fields for the embedding endpoint
This commit is contained in:
parent 8dcbf65a45
commit fa2a61e065
1 changed file with 3 additions and 1 deletion
@@ -189,7 +189,9 @@ def create_completion(
 class CreateEmbeddingRequest(BaseModel):
     # ignored, but marked as required for the sake of compatibility with openai's api
     model: str = model_field
-    input: str
+    input: str = Field(
+        description="The input to embed."
+    )

     class Config:
         schema_extra = {
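For context, a minimal sketch of what CreateEmbeddingRequest looks like after this change, assuming pydantic v1-style BaseModel/Field and a module-level model_field helper as used by the other request models in this file; the model_field definition and the schema_extra example payload below are illustrative stand-ins, not copied from the repository:

    # Sketch of the embedding request model after this commit (assumed context).
    from pydantic import BaseModel, Field

    # Hypothetical stand-in for the shared `model_field` used across request models.
    model_field = Field(description="The model to use.")

    class CreateEmbeddingRequest(BaseModel):
        # ignored, but marked as required for the sake of compatibility with openai's api
        model: str = model_field
        input: str = Field(
            description="The input to embed."
        )

        class Config:
            # Example request body surfaced in the generated OpenAPI docs
            # (illustrative payload, not taken from the diff).
            schema_extra = {
                "example": {
                    "model": "example-model",
                    "input": "The food was delicious and the waiter...",
                }
            }

With the description attached via Field, the /v1/embeddings endpoint's generated OpenAPI schema documents the input parameter instead of listing it as a bare string, mirroring how the other request fields in this server are annotated.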