From 153e8524113621d3ca90d146e6dc5d42a5c42160 Mon Sep 17 00:00:00 2001
From: Olivier Chafik
Date: Mon, 20 Jan 2025 20:55:52 +0000
Subject: [PATCH] Apply suggestions from code review

Co-authored-by: Xuan Son Nguyen
Co-authored-by: Georgi Gerganov
---
 common/common.cpp | 6 +++---
 common/common.h   | 4 ++--
 include/llama.h   | 1 +
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index 9c535a176..ce023fc2b 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -1821,11 +1821,11 @@ std::string common_chat_format_example(const llama_chat_template & tmpl, bool us
     return common_chat_apply_template(tmpl, msgs, true, use_jinja);
 }
 
-llama_chat_templates llama_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
+llama_chat_templates common_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
 {
     auto vocab = llama_model_get_vocab(model);
-    auto bos_token = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
-    auto eos_token = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
+    auto token_bos = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
+    auto token_eos = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
     std::string default_template_src = chat_template_override;
     std::string tool_use_template_src = chat_template_override;
     bool has_explicit_template = !chat_template_override.empty();
diff --git a/common/common.h b/common/common.h
index a96a99531..352cbb0fa 100644
--- a/common/common.h
+++ b/common/common.h
@@ -607,8 +607,8 @@ typedef minja::chat_template llama_chat_template;
 
 struct llama_chat_templates {
     bool has_explicit_template; // Model had builtin template or template overridde was specified.
-    std::unique_ptr<llama_chat_template> default_template; // always set (defaults to chatml)
-    std::unique_ptr<llama_chat_template> tool_use_template;
+    std::unique_ptr<llama_chat_template> template_default; // always set (defaults to chatml)
+    std::unique_ptr<llama_chat_template> template_tool_use;
 };
 
 // CPP wrapper for llama_chat_apply_template
diff --git a/include/llama.h b/include/llama.h
index dca9314aa..3b75e7607 100644
--- a/include/llama.h
+++ b/include/llama.h
@@ -510,6 +510,7 @@ extern "C" {
     LLAMA_API uint64_t llama_model_size(const struct llama_model * model);
 
     // Get the default chat template. Returns nullptr if not available
+    // If name is NULL, returns the default chat template
     LLAMA_API const char * llama_model_chat_template(const struct llama_model * model, const char * name);
 
     // Returns the total number of parameters in the model
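
A brief usage sketch of the renamed API follows; it is not part of the patch, only an illustration for reviewers. It assumes the declarations shown above (`common_chat_templates_from_model`, the `template_default`/`template_tool_use` fields, and the `name` parameter of `llama_model_chat_template`); the `source()` accessor on `minja::chat_template` and the "tool_use" template name are assumptions based on the surrounding codebase, not introduced by this patch.

```cpp
#include <cstdio>
#include "common.h"
#include "llama.h"

// Sketch: enumerate the chat templates a loaded model provides, using the
// names introduced by this patch. Assumes `model` is a valid llama_model *.
static void dump_chat_templates(const struct llama_model * model) {
    // C API: per the new doc comment, a NULL name requests the default
    // template; a named lookup (assumed here: "tool_use") may return
    // nullptr when the model has no such template.
    const char * tmpl_default = llama_model_chat_template(model, /* name */ NULL);
    const char * tmpl_tools   = llama_model_chat_template(model, "tool_use");
    printf("builtin default template: %s\n", tmpl_default ? "yes" : "no");
    printf("builtin tool-use template: %s\n", tmpl_tools ? "yes" : "no");

    // C++ helper: an empty override string means "use the model's own
    // templates" (has_explicit_template will be false in that case).
    llama_chat_templates tmpls = common_chat_templates_from_model(model, "");

    // template_default is documented above as always set (defaults to chatml).
    printf("default template source:\n%s\n", tmpls.template_default->source().c_str());

    // template_tool_use is only set when the model ships a tool-use variant.
    if (tmpls.template_tool_use) {
        printf("tool-use template source:\n%s\n", tmpls.template_tool_use->source().c_str());
    }
}
```

The `token_bos`/`token_eos` and `template_default`/`template_tool_use` renames follow the noun-first naming convention used elsewhere in the codebase, and the `common_` prefix marks the helper as belonging to the common library rather than the core llama API.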