From 2f0a01465acd9f7cde78046a947f51978ff776c1 Mon Sep 17 00:00:00 2001
From: MaggotHATE
Date: Thu, 21 Nov 2024 21:58:36 +0500
Subject: [PATCH] Cleanup of unused features

---
 examples/server/server.cpp | 1 -
 examples/server/utils.hpp  | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 85a65efb9..a7a86548b 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -798,7 +798,6 @@ struct server_context {
             slot.oaicompat = false;
             slot.oaicompat_model = "";
         }
-        std::string default_empty = "";

         slot.params.stream       = json_value(data, "stream", false);
         slot.params.cache_prompt = json_value(data, "cache_prompt", false);
diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index 6c51b9f12..d82bc6464 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -338,7 +338,7 @@ inline std::string format_chat(const struct llama_model * model, const std::stri
     }

     if (!is_custom) formatted_chat = common_chat_apply_template(model, tmpl, chat, true);
-    LOG_WRN("formatted_chat using '%s': '%s'\n", tmpl.c_str(), formatted_chat.c_str());
+    LOG_DBG("formatted_chat using '%s': '%s'\n", tmpl.c_str(), formatted_chat.c_str());

     return formatted_chat;
 }