diff --git a/llama.h b/llama.h
index cfbbbc02d..77a84c18a 100644
--- a/llama.h
+++ b/llama.h
@@ -708,7 +708,6 @@ extern "C" {
     /// Apply chat template. Inspired by hf apply_chat_template() on python.
     /// Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model"
     /// NOTE: This function only support some known jinja templates. It is not a jinja parser.
-    /// TODO: Add link to docs for this function so that developers can know the list of supported templates
     /// @param tmpl A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead.
     /// @param chat Pointer to a list of multiple llama_chat_message
     /// @param n_msg Number of llama_chat_message in this chat