Add chatml fallback for cpp llama_chat_apply_template (#8160)

* add chatml fallback for cpp `llama_chat_apply_template`

* remove redundant code
This commit was authored by Xuan Son Nguyen on 2024-06-27 at 18:14:19 +02:00 and committed by GitHub.
parent ab3679112d
commit 16791b8f0b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 20 additions and 1 deletions

View file

@ -380,6 +380,8 @@ struct llama_chat_msg {
bool llama_chat_verify_template(const std::string & tmpl);
// CPP wrapper for llama_chat_apply_template
// If the built-in template is not supported, we default to chatml
// If the custom "tmpl" is not supported, we throw an error
std::string llama_chat_apply_template(const struct llama_model * model,
const std::string & tmpl,
const std::vector<llama_chat_msg> & chat,