From 0daf16defcc353de715e29e6103e4c7a2422ee58 Mon Sep 17 00:00:00 2001
From: Andrei Betlen
Date: Wed, 12 Apr 2023 19:08:11 -0400
Subject: [PATCH] Enable logprobs on completion endpoint

---
 llama_cpp/server/__main__.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/llama_cpp/server/__main__.py b/llama_cpp/server/__main__.py
index 4360506..8b9614e 100644
--- a/llama_cpp/server/__main__.py
+++ b/llama_cpp/server/__main__.py
@@ -118,7 +118,6 @@ def create_completion(request: CreateCompletionRequest, llama: llama_cpp.Llama=D
         exclude={
             "model",
             "n",
-            "logprobs",
             "frequency_penalty",
             "presence_penalty",
             "best_of",