Rwkv chat template fix (#10001)
* llama: remove useless template matching for rwkv-world

Signed-off-by: Molly Sophia <mollysophia379@gmail.com>

* converter: Add comment about the hack for rwkv models

Signed-off-by: Molly Sophia <mollysophia379@gmail.com>

* Update src/llama.cpp

Co-authored-by: Xuan Son Nguyen <thichthat@gmail.com>

---------

Signed-off-by: Molly Sophia <mollysophia379@gmail.com>
Co-authored-by: Xuan Son Nguyen <thichthat@gmail.com>
parent c421ac072d
commit 11d47057a5
3 changed files with 3 additions and 5 deletions
src/llama.cpp
@@ -21697,7 +21697,8 @@ static int32_t llama_chat_apply_template_internal(
         if (add_ass) {
             ss << "[|assistant|]";
         }
-    } else if (tmpl == "rwkv-world" || tmpl_contains("rwkv-world") || tmpl_contains("'User: ' + message['content'] + '\n\nAssistant:'")) {
+    } else if (tmpl == "rwkv-world" || tmpl_contains("rwkv-world")) {
+        // this template requires the model to have "\n\n" as EOT token
         for (auto message : chat) {
             std::string role(message->role);
             if (role == "user") {
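For reference, the rwkv-world branch touched above emits no special tokens; it renders user turns as "User: ... Assistant:" and relies on "\n\n" as the end-of-turn marker, as the new comment in the diff notes. Below is a minimal, self-contained sketch of that formatting logic. The names chat_message and format_rwkv_world are made up for illustration, and the non-user branch (plain content followed by "\n\n") is an assumption based on the surrounding llama.cpp code, not something shown in the diff above.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Minimal stand-in for a chat message (role + content pair), for illustration only.
struct chat_message {
    std::string role;
    std::string content;
};

// Sketch of the rwkv-world formatting: user turns become
// "User: <content>\n\nAssistant:", other turns are assumed to be emitted
// verbatim followed by "\n\n", the sequence the model treats as end-of-turn.
static std::string format_rwkv_world(const std::vector<chat_message> & chat) {
    std::ostringstream ss;
    for (const auto & message : chat) {
        if (message.role == "user") {
            ss << "User: " << message.content << "\n\nAssistant:";
        } else {
            ss << message.content << "\n\n";
        }
    }
    return ss.str();
}

int main() {
    // A short conversation ending with a user turn.
    std::vector<chat_message> chat = {
        {"user",      "Hello"},
        {"assistant", "Hi, how can I help?"},
        {"user",      "What is RWKV?"},
    };
    std::cout << format_rwkv_world(chat);
    return 0;
}

With the sample conversation above, the resulting prompt ends in "Assistant:", so generation continues directly after it, and each completed turn is terminated by "\n\n", matching the EOT requirement the comment in the diff describes.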