From 2ab9cb96edba28199b3751af0e108c877069b522 Mon Sep 17 00:00:00 2001 From: ngxson Date: Wed, 21 Feb 2024 17:41:04 +0100 Subject: [PATCH] server: only check model template if there is no custom tmpl --- examples/server/server.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/examples/server/server.cpp b/examples/server/server.cpp index 05c2707ec..13037ac71 100644 --- a/examples/server/server.cpp +++ b/examples/server/server.cpp @@ -2723,8 +2723,10 @@ int main(int argc, char **argv) LOG_INFO("model loaded", {}); } - // check if the template comes with the model is supported by us - llama.validate_model_chat_template(sparams); + if (sparams.chat_template.empty()) { // custom chat template is not supplied + // check if the template that comes with the model is supported by us + llama.validate_model_chat_template(sparams); + } // Middleware for API key validation auto validate_api_key = [&sparams](const httplib::Request &req, httplib::Response &res) -> bool {