From 39795570dbe90a05f98520c3fd0b7d783464ab11 Mon Sep 17 00:00:00 2001
From: "f.buciuni"
Date: Fri, 7 Feb 2025 19:54:55 +0100
Subject: [PATCH] updating velvet chat template

---
 src/llama-chat.cpp | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/llama-chat.cpp b/src/llama-chat.cpp
index 876c90691..af0539bd4 100644
--- a/src/llama-chat.cpp
+++ b/src/llama-chat.cpp
@@ -573,8 +573,11 @@ int32_t llm_chat_apply_template(
         // Velvet template
         std::string leading_space = "";
         std::string trailing_space = "";
-        bool trim_assistant_message = true;
+        bool trim_assistant_message = false;
         bool is_inside_turn = false;
+        std::string system_message = "";
+        std::string last_message(chat.back()->content);
+        ss << "";
         for (auto message : chat) {
             if (!is_inside_turn) {
                 ss << leading_space << "" << trailing_space;
@@ -583,9 +586,9 @@ int32_t llm_chat_apply_template(
             std::string role(message->role);
             std::string content(message->content);
             if (role == "system") {
-                ss << content << "\n\n";
+                system_message = content + "\n\n";
             } else if (role == "user") {
-                ss << content << leading_space << "";
+                ss << (content==last_message ? system_message : "") << content << leading_space << "";
             } else {
                 ss << trailing_space << (trim_assistant_message ? trim(content) : content) << "";
                 is_inside_turn = false;
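
Note (commentary, not part of the patch): the change stops emitting the system
prompt as soon as it is seen and instead buffers it in system_message, then
prepends it to the final user message only; it also disables trimming of
assistant content. Below is a minimal standalone C++ sketch of that behaviour,
assuming a simplified msg struct, an illustrative apply_velvet() wrapper, and a
local trim() helper in place of llama.cpp's internals. The empty string
literals mirror the excerpt above, where the template's special turn tokens are
not shown.

// Standalone sketch (not llama.cpp code): reproduces the updated Velvet
// template flow so the behaviour change can be checked in isolation.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct msg { std::string role; std::string content; };

// Simplified stand-in for llama.cpp's trim() helper.
static std::string trim(const std::string & s) {
    size_t b = s.find_first_not_of(" \t\n");
    size_t e = s.find_last_not_of(" \t\n");
    return b == std::string::npos ? "" : s.substr(b, e - b + 1);
}

static std::string apply_velvet(const std::vector<msg> & chat) {
    std::ostringstream ss;
    std::string leading_space  = "";
    std::string trailing_space = "";
    bool trim_assistant_message = false;   // patch: was true
    bool is_inside_turn = false;
    std::string system_message = "";       // patch: buffer for the system prompt
    std::string last_message = chat.back().content;
    ss << "";                              // patch: sequence-start token goes here
    for (const auto & message : chat) {
        if (!is_inside_turn) {
            ss << leading_space << "" << trailing_space;  // turn-opening token
            is_inside_turn = true;
        }
        if (message.role == "system") {
            // patch: defer the system prompt instead of emitting it immediately
            system_message = message.content + "\n\n";
        } else if (message.role == "user") {
            // patch: prepend the buffered system prompt to the last user turn only
            ss << (message.content == last_message ? system_message : "")
               << message.content << leading_space << "";  // turn-closing token
        } else {
            ss << trailing_space
               << (trim_assistant_message ? trim(message.content) : message.content)
               << "";                                       // end-of-turn token
            is_inside_turn = false;
        }
    }
    return ss.str();
}

int main() {
    std::vector<msg> chat = {
        {"system",    "You are a helpful assistant."},
        {"user",      "Hello"},
        {"assistant", "Hi there."},
        {"user",      "What is the capital of Italy?"},
    };
    // With the patch applied, the system prompt appears immediately before
    // the last user message rather than at the start of the conversation.
    std::cout << apply_velvet(chat) << "\n";
    return 0;
}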