Add chat template for orca-vicuna
parent f6104b9b77
commit e0f9d9d732
1 changed file with 17 additions and 0 deletions
llama.cpp | 17 +++++++++++++++++
@@ -15714,6 +15714,7 @@ static int32_t llama_chat_apply_template_internal(
             ss << "GPT4 Correct Assistant:";
         }
     } else if (tmpl == "vicuna" || tmpl.find("USER: ") != std::string::npos) {
+        // Vicuna 1.1+, Nous Capybara, etc.
         for (auto message : chat) {
             std::string role(message->role);
             if (role == "user") {
@@ -15730,6 +15731,22 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "ASSISTANT:";
         }
+    } else if (tmpl == "orca-vicuna" || tmpl.find("SYSTEM: ") != std::string::npos) {
+        // Orca-Vicuna
+        for (auto message : chat) {
+            std::string role(message->role);
+            if (role == "system") {
+                ss << "SYSTEM: ";
+            } else if (role == "user") {
+                ss << "USER: ";
+            } else if (role == "assistant") {
+                ss << "ASSISTANT: ";
+            }
+            ss << message->content << "\n";
+        }
+        if (add_ass) {
+            ss << "ASSISTANT:";
+        }
     } else {
         // template not supported
         return -1;
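For reference, a minimal standalone sketch (not part of the commit) that reproduces the formatting of the new branch, so the resulting prompt can be inspected without building llama.cpp. The chat_message struct and format_orca_vicuna helper are illustrative names only, not llama.cpp API.

// Standalone sketch: mirrors the new orca-vicuna branch added in this commit.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct chat_message {          // stand-in for llama_chat_message
    std::string role;
    std::string content;
};

static std::string format_orca_vicuna(const std::vector<chat_message> & chat, bool add_ass) {
    std::ostringstream ss;
    for (const auto & message : chat) {
        if (message.role == "system") {
            ss << "SYSTEM: ";
        } else if (message.role == "user") {
            ss << "USER: ";
        } else if (message.role == "assistant") {
            ss << "ASSISTANT: ";
        }
        ss << message.content << "\n";
    }
    if (add_ass) {
        ss << "ASSISTANT:";    // trailing generation prompt, no space or newline
    }
    return ss.str();
}

int main() {
    std::vector<chat_message> chat = {
        { "system", "You are a helpful assistant." },
        { "user",   "Hello!" },
    };
    std::cout << format_orca_vicuna(chat, /*add_ass=*/true) << std::endl;
    // Prints:
    // SYSTEM: You are a helpful assistant.
    // USER: Hello!
    // ASSISTANT:
    return 0;
}

Inside llama.cpp itself, this branch is selected by llama_chat_apply_template_internal when tmpl is "orca-vicuna" or the model's template string contains "SYSTEM: ".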