Add chatml fallback for cpp llama_chat_apply_template
(#8160)
* add chatml fallback for cpp `llama_chat_apply_template`
* remove redundant code
This commit is contained in:
parent
ab3679112d
commit
16791b8f0b
2 changed files with 20 additions and 1 deletions
|
@@ -380,6 +380,8 @@ struct llama_chat_msg {
|
|||
bool llama_chat_verify_template(const std::string & tmpl);
|
||||
|
||||
// CPP wrapper for llama_chat_apply_template
|
||||
// If the built-in template is not supported, we default to chatml
|
||||
// If the custom "tmpl" is not supported, we throw an error
|
||||
std::string llama_chat_apply_template(const struct llama_model * model,
|
||||
const std::string & tmpl,
|
||||
const std::vector<llama_chat_msg> & chat,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue