llama_cpp server: move logprobs to supported
I think this is actually supported (it's in the arguments of `Llama.__call__`, which is how the completion is invoked). Decision: mark as supported.
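The reasoning checks out: `Llama.__call__` in llama-cpp-python already accepts a `logprobs` argument, so the server-side field has a direct backend. A minimal sketch of the direct call, with the model path as a placeholder:

```python
# Minimal sketch: Llama.__call__ accepts logprobs directly, which is
# why the server-side field can be treated as supported.
from llama_cpp import Llama

llm = Llama(model_path="./models/7B/ggml-model.bin")  # placeholder path

# Ask for the top-2 log-probabilities of each sampled token.
out = llm("Q: Name the planets in the solar system. A: ",
          max_tokens=32, logprobs=2)

# The OpenAI-style completion response carries per-token logprobs.
print(out["choices"][0]["logprobs"])
```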
This commit is contained in:
parent b47b9549d5
commit 1e42913599
1 changed file with 1 addition and 2 deletions
```diff
@@ -79,12 +79,11 @@ class CreateCompletionRequest(BaseModel):
     echo: bool = False
     stop: Optional[List[str]] = []
     stream: bool = False
+    logprobs: Optional[int] = Field(None)
 
     # ignored, but marked as required for the sake of compatibility with openai's api
     model: str = model_field
 
-    logprobs: Optional[int] = Field(None)
-
     # llama.cpp specific parameters
     top_k: int = 40
     repeat_penalty: float = 1.1
```
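With the field moved into the supported set, a completion request can carry `logprobs` through the server's OpenAI-compatible API. A hedged usage sketch, assuming the server is running locally on its default port 8000 (e.g. started with `python -m llama_cpp.server`); the endpoint path and port reflect that assumed setup:

```python
# Usage sketch: send logprobs through the OpenAI-compatible
# completions endpoint. Host/port are assumptions about your setup.
import requests

resp = requests.post(
    "http://localhost:8000/v1/completions",
    json={
        "model": "ignored",  # accepted only for OpenAI API compatibility
        "prompt": "The capital of France is",
        "max_tokens": 8,
        "logprobs": 3,  # top-3 log-probabilities per generated token
    },
)
resp.raise_for_status()
print(resp.json()["choices"][0].get("logprobs"))
```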