From b3e343eae707fe9486916fb3d619d49cd6a9d12b Mon Sep 17 00:00:00 2001
From: MaggotHATE
Date: Wed, 20 Nov 2024 16:04:55 +0500
Subject: [PATCH] Fix for `simple-chat`

---
 examples/simple-chat/simple-chat.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/examples/simple-chat/simple-chat.cpp b/examples/simple-chat/simple-chat.cpp
index 5f9973163..d85a385ce 100644
--- a/examples/simple-chat/simple-chat.cpp
+++ b/examples/simple-chat/simple-chat.cpp
@@ -158,10 +158,10 @@ int main(int argc, char ** argv) {
 
         // add the user input to the message list and format it
         messages.push_back({"user", strdup(user.c_str())});
-        int new_len = llama_chat_apply_template(model, nullptr, messages.data(), messages.size(), true, formatted.data(), formatted.size());
+        int new_len = llama_chat_apply_template(model, nullptr, nullptr, nullptr, messages.data(), messages.size(), true, formatted.data(), formatted.size());
         if (new_len > (int)formatted.size()) {
             formatted.resize(new_len);
-            new_len = llama_chat_apply_template(model, nullptr, messages.data(), messages.size(), true, formatted.data(), formatted.size());
+            new_len = llama_chat_apply_template(model, nullptr, nullptr, nullptr, messages.data(), messages.size(), true, formatted.data(), formatted.size());
         }
         if (new_len < 0) {
             fprintf(stderr, "failed to apply the chat template\n");
@@ -178,7 +178,7 @@ int main(int argc, char ** argv) {
 
         // add the response to the messages
         messages.push_back({"assistant", strdup(response.c_str())});
-        prev_len = llama_chat_apply_template(model, nullptr, messages.data(), messages.size(), false, nullptr, 0);
+        prev_len = llama_chat_apply_template(model, nullptr, nullptr, nullptr, messages.data(), messages.size(), false, nullptr, 0);
         if (prev_len < 0) {
             fprintf(stderr, "failed to apply the chat template\n");
             return 1;