From 44869c59d6b331e742d8bb2dab94304fed9842fa Mon Sep 17 00:00:00 2001
From: Michael Yang
Date: Fri, 3 May 2024 16:11:49 -0700
Subject: [PATCH] omit prompt and generate settings from final response

---
 llm/ext_server/server.cpp | 2 --
 1 file changed, 2 deletions(-)

diff --git a/llm/ext_server/server.cpp b/llm/ext_server/server.cpp
index 3448bcc5..df28c412 100644
--- a/llm/ext_server/server.cpp
+++ b/llm/ext_server/server.cpp
@@ -1186,8 +1186,6 @@ struct llama_server_context
         {"model",               params.model_alias},
         {"tokens_predicted",    slot.n_decoded},
         {"tokens_evaluated",    slot.n_prompt_tokens},
-        {"generation_settings", get_formated_generation(slot)},
-        {"prompt",              slot.prompt},
         {"truncated",           slot.truncated},
         {"stopped_eos",         slot.stopped_eos},
         {"stopped_word",        slot.stopped_word},
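
For context, a minimal sketch of the trimmed final-response payload after this
change, assuming nlohmann::json (the json alias server.cpp builds its responses
with); the literal values are hypothetical stand-ins for the params/slot members
named in the hunk, which are only available inside llama_server_context:

    #include <iostream>
    #include <nlohmann/json.hpp>

    using json = nlohmann::json;

    int main() {
        // Stand-in values: params.model_alias, slot.n_decoded, etc. are not
        // reachable outside server.cpp, so plausible literals are used here.
        json res = {
            {"model",            "model-alias"}, // params.model_alias
            {"tokens_predicted", 128},           // slot.n_decoded
            {"tokens_evaluated", 512},           // slot.n_prompt_tokens
            // "generation_settings" and "prompt" are omitted by this patch
            {"truncated",        false},         // slot.truncated
            {"stopped_eos",      true},          // slot.stopped_eos
            {"stopped_word",     false}          // slot.stopped_word
        };
        std::cout << res.dump(2) << std::endl;
        return 0;
    }

Dropping the two fields shrinks the final response, which previously echoed the
entire prompt and the full generation-settings object back to the caller.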