gemma: only apply system_prompt to non-model messages
This commit is contained in:
parent
57b4c2613b
commit
edf3b53a5c
1 changed file with 1 addition and 1 deletion
|
@ -12795,7 +12795,7 @@ static int32_t llama_chat_apply_template_internal(
|
||||||
// in gemma, "assistant" is "model"
|
// in gemma, "assistant" is "model"
|
||||||
role = role == "assistant" ? "model" : message->role;
|
role = role == "assistant" ? "model" : message->role;
|
||||||
ss << "<start_of_turn>" << role << "\n";
|
ss << "<start_of_turn>" << role << "\n";
|
||||||
if (!system_prompt.empty()) {
|
if (!system_prompt.empty() && role != "model") {
|
||||||
ss << system_prompt << "\n\n";
|
ss << system_prompt << "\n\n";
|
||||||
system_prompt = "";
|
system_prompt = "";
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue