llama_chat_get_typed_template

ngxson 2024-04-24 18:27:39 +02:00
parent 81b5903890
commit 0d3363e4e6
2 changed files with 7 additions and 7 deletions


llama.cpp

@@ -17307,7 +17307,7 @@ LLAMA_API int32_t llama_chat_get_model_template(
     }
 }
 
-LLAMA_API llama_chat_template llama_chat_get_template_type(const char * tmpl) {
+LLAMA_API llama_chat_template llama_chat_get_typed_template(const char * tmpl) {
     if (tmpl == nullptr) {
         return LLAMA_CHAT_TEMPLATE_NOT_SUPPORTED;
     }
@@ -17596,7 +17596,7 @@ LLAMA_API int32_t llama_chat_apply_template(
     }
 
     // detect template type
-    llama_chat_template ttmpl = llama_chat_get_template_type(curr_tmpl.c_str());
+    llama_chat_template ttmpl = llama_chat_get_typed_template(curr_tmpl.c_str());
     bool support_system_message = llama_chat_support_system_message(ttmpl);
     if (ttmpl == LLAMA_CHAT_TEMPLATE_NOT_SUPPORTED) {
         return -1;
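
For reference, here is a minimal caller-side sketch of how the renamed detection helper is meant to be used, built only from the calls visible in this hunk (llama_chat_get_typed_template, llama_chat_support_system_message, and the LLAMA_CHAT_TEMPLATE_NOT_SUPPORTED sentinel); the wrapper function and its output are illustrative and not part of the commit:

// Sketch only: detect the type of a Jinja chat template string with the
// renamed API, bail out if it is unsupported, and report whether the
// detected template type supports a system message.
#include "llama.h"

#include <cstdio>

static bool check_template(const char * jinja_tmpl) {
    llama_chat_template ttmpl = llama_chat_get_typed_template(jinja_tmpl);
    if (ttmpl == LLAMA_CHAT_TEMPLATE_NOT_SUPPORTED) {
        fprintf(stderr, "chat template not supported\n");
        return false;
    }
    bool support_system_message = llama_chat_support_system_message(ttmpl);
    printf("system message supported: %s\n", support_system_message ? "yes" : "no");
    return true;
}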

llama.h

@@ -892,12 +892,12 @@ extern "C" {
                                   char * buf,
                                   int32_t length);
 
-    /// Get the enum llama_chat_template based on Jinja template
+    /// Get the value of enum llama_chat_template based on given Jinja template
     /// @param tmpl Jinja template (a string)
-    /// @return The currect enum llama_chat_template
-    LLAMA_API llama_chat_template llama_chat_get_template_type(const char * tmpl);
+    /// @return The correct value of enum llama_chat_template
+    LLAMA_API llama_chat_template llama_chat_get_typed_template(const char * tmpl);
 
-    /// Get the format prefix for a given message
+    /// Get the format prefix for a given message (based on role)
     /// @param tmpl Use enum llama_chat_template
     /// @param role The role of the current message
     /// @param prev_role The role of the previous message, can be nullptr
@@ -911,7 +911,7 @@ extern "C" {
                                   char * buf,
                                   int32_t length);
 
-    /// Get the format postfix for a given message
+    /// Get the format postfix for a given message (based on role)
     /// @param tmpl Use enum llama_chat_template
     /// @param role The role of the current message
    /// @param prev_role The role of the previous message, can be nullptr
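
To illustrate how the prefix/postfix getters documented above could be combined to wrap a single message, here is a hedged sketch. Only the parameters named in the doc comments (tmpl, role, prev_role, plus the buf/length output pair) come from this diff; the function names llama_chat_get_prefix and llama_chat_get_postfix, their int32_t return value, and the helper itself are assumptions made for the example and are not confirmed by this commit:

// Hedged sketch with assumed names: llama_chat_get_prefix / llama_chat_get_postfix
// stand in for the getters whose doc comments appear above; their exact
// signatures and return values are an assumption here.
#include "llama.h"

#include <cstdint>
#include <string>

static std::string format_message(llama_chat_template tmpl,
                                  const char * role,      // role of the current message
                                  const char * prev_role, // may be nullptr for the first message
                                  const char * content) {
    char prefix[256];
    char postfix[256];
    // Assumed: both getters fill buf and return the number of bytes written,
    // or a negative value on error.
    int32_t n_pre  = llama_chat_get_prefix (tmpl, role, prev_role, prefix,  (int32_t) sizeof(prefix));
    int32_t n_post = llama_chat_get_postfix(tmpl, role, prev_role, postfix, (int32_t) sizeof(postfix));
    if (n_pre < 0 || n_post < 0) {
        return std::string(); // illustrative error handling only
    }
    return std::string(prefix, n_pre) + content + std::string(postfix, n_post);
}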