server: only check model template if there is no custom tmpl
This commit is contained in:
parent
10d86733f3
commit
2ab9cb96ed
1 changed files with 4 additions and 2 deletions
|
@@ -2723,8 +2723,10 @@ int main(int argc, char **argv)
         LOG_INFO("model loaded", {});
     }
 
-    // check if the template comes with the model is supported by us
-    llama.validate_model_chat_template(sparams);
+    if (sparams.chat_template.empty()) { // custom chat template is not supplied
+        // check if the template comes with the model is supported by us
+        llama.validate_model_chat_template(sparams);
+    }
 
     // Middleware for API key validation
     auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool {
|
Loading…
Add table
Add a link
Reference in a new issue