From edf3b53a5c9ab02fbeaaaa49f03a05507c3188f7 Mon Sep 17 00:00:00 2001 From: ngxson Date: Thu, 22 Feb 2024 16:48:59 +0100 Subject: [PATCH] gemma: only apply system_prompt on non-model message --- llama.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/llama.cpp b/llama.cpp index a408c2ffe..40dda265c 100644 --- a/llama.cpp +++ b/llama.cpp @@ -12795,7 +12795,7 @@ static int32_t llama_chat_apply_template_internal( // in gemma, "assistant" is "model" role = role == "assistant" ? "model" : message->role; ss << "<start_of_turn>" << role << "\n"; - if (!system_prompt.empty()) { + if (!system_prompt.empty() && role != "model") { ss << system_prompt << "\n\n"; system_prompt = ""; }