Apply suggestions from code review
Co-authored-by: Xuan Son Nguyen <thichthat@gmail.com>
Co-authored-by: Georgi Gerganov <ggerganov@gmail.com>
parent cc50356470
commit 153e852411
3 changed files with 6 additions and 5 deletions
@@ -1821,11 +1821,11 @@ std::string common_chat_format_example(const llama_chat_template & tmpl, bool us
     return common_chat_apply_template(tmpl, msgs, true, use_jinja);
 }
 
-llama_chat_templates llama_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
+llama_chat_templates common_chat_templates_from_model(const struct llama_model * model, const std::string & chat_template_override)
 {
     auto vocab = llama_model_get_vocab(model);
-    auto bos_token = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
-    auto eos_token = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
+    auto token_bos = common_token_to_piece(vocab, llama_vocab_bos(vocab), true);
+    auto token_eos = common_token_to_piece(vocab, llama_vocab_eos(vocab), true);
     std::string default_template_src = chat_template_override;
     std::string tool_use_template_src = chat_template_override;
     bool has_explicit_template = !chat_template_override.empty();
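The rename to common_chat_templates_from_model follows the common_ prefix used by the other helpers in this hunk (common_token_to_piece, common_chat_apply_template). A minimal usage sketch, assuming a loaded llama_model * named model; the empty override string and the fallback comment restate the struct's documented behavior and are not code from this commit:

    // Empty override: keep whatever template the GGUF metadata provides.
    llama_chat_templates tmpls = common_chat_templates_from_model(model, /* chat_template_override = */ "");
    if (!tmpls.has_explicit_template) {
        // Neither the model nor the caller supplied a template;
        // template_default falls back to chatml.
    }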
@@ -607,8 +607,8 @@ typedef minja::chat_template llama_chat_template;
 
 struct llama_chat_templates {
     bool has_explicit_template; // Model had builtin template or template override was specified.
-    std::unique_ptr<llama_chat_template> default_template; // always set (defaults to chatml)
-    std::unique_ptr<llama_chat_template> tool_use_template;
+    std::unique_ptr<llama_chat_template> template_default; // always set (defaults to chatml)
+    std::unique_ptr<llama_chat_template> template_tool_use;
 };
 
 // CPP wrapper for llama_chat_apply_template
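With both members sharing the template_ prefix, selecting between the two variants reads naturally. A sketch, assuming tmpls was produced by common_chat_templates_from_model as above:

    // Prefer the tool-use variant when the model ships one;
    // template_default is always set (defaults to chatml).
    const llama_chat_template & tmpl = tmpls.template_tool_use
        ? *tmpls.template_tool_use
        : *tmpls.template_default;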
@@ -510,6 +510,7 @@ extern "C" {
     LLAMA_API uint64_t llama_model_size(const struct llama_model * model);
 
     // Get the default chat template. Returns nullptr if not available
+    // If name is NULL, returns the default chat template
     LLAMA_API const char * llama_model_chat_template(const struct llama_model * model, const char * name);
 
     // Returns the total number of parameters in the model
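The added comment documents the name parameter of the C API. A sketch of the NULL case, assuming model was loaded beforehand (e.g. via llama_model_load_from_file); the error path is illustrative:

    // Passing NULL asks for the model's default chat template.
    const char * tmpl_src = llama_model_chat_template(model, /* name = */ NULL);
    if (tmpl_src == NULL) {
        // No built-in template in the GGUF; the caller must supply one.
    }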