Add chat template for orca-vicuna

Kai Zau 2024-03-30 14:41:43 +09:00
parent f6104b9b77
commit e0f9d9d732


@@ -15714,6 +15714,7 @@ static int32_t llama_chat_apply_template_internal(
            ss << "GPT4 Correct Assistant:";
        }
    } else if (tmpl == "vicuna" || tmpl.find("USER: ") != std::string::npos) {
        // Vicuna 1.1+, Nous Capybara, etc.
        for (auto message : chat) {
            std::string role(message->role);
            if (role == "user") {
@@ -15730,6 +15731,22 @@ static int32_t llama_chat_apply_template_internal(
        if (add_ass) {
            ss << "ASSISTANT:";
        }
    } else if (tmpl == "orca-vicuna" || tmpl.find("SYSTEM: ") != std::string::npos) {
        // Orca-Vicuna
        for (auto message : chat) {
            std::string role(message->role);
            if (role == "system") {
                ss << "SYSTEM: ";
            } else if (role == "user") {
                ss << "USER: ";
            } else if (role == "assistant") {
                ss << "ASSISTANT: ";
            }
            ss << message->content << "\n";
        }
        if (add_ass) {
            ss << "ASSISTANT:";
        }
    } else {
        // template not supported
        return -1;
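
For context, a minimal sketch (not part of this commit) of how the new branch can be exercised through the public llama_chat_apply_template API. Passing a null model together with an explicit template name is the same pattern the chat-template test uses, so llama_chat_apply_template_internal receives "orca-vicuna" directly and takes the branch added above; the conversation, buffer size, and expected output in the comments are illustrative assumptions.

// Sketch: render a conversation with the "orca-vicuna" template via the public API.
#include <cstdio>
#include <vector>
#include "llama.h"

int main() {
    std::vector<llama_chat_message> chat = {
        { "system",    "You are a helpful assistant." },
        { "user",      "Hello!" },
        { "assistant", "Hi there." },
        { "user",      "Who are you?" },
    };

    std::vector<char> buf(1024);
    // A null model plus an explicit template name skips reading the template
    // from model metadata and formats with the named built-in template.
    const int32_t n = llama_chat_apply_template(
        nullptr, "orca-vicuna",
        chat.data(), chat.size(),
        /*add_ass=*/true,
        buf.data(), (int32_t) buf.size());
    if (n < 0) {
        fprintf(stderr, "template not supported\n");
        return 1;
    }
    printf("%.*s", n, buf.data());
    // Expected rendering per the branch above:
    //   SYSTEM: You are a helpful assistant.
    //   USER: Hello!
    //   ASSISTANT: Hi there.
    //   USER: Who are you?
    //   ASSISTANT:
    return 0;
}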