diff --git a/common/chat.cpp b/common/chat.cpp
index 70827bbcf..2b17374d5 100644
--- a/common/chat.cpp
+++ b/common/chat.cpp
@@ -841,4 +841,4 @@ common_chat_msg common_chat_parse(const std::string & input, common_chat_format
         default:
             throw std::runtime_error("Unsupported format: " + common_chat_format_name(format));
     }
-}
\ No newline at end of file
+}
diff --git a/common/chat.hpp b/common/chat.hpp
index fdcc8ef90..ca165aa13 100644
--- a/common/chat.hpp
+++ b/common/chat.hpp
@@ -32,7 +32,7 @@ enum common_chat_format {
     COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2,
     COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1,
     COMMON_CHAT_FORMAT_HERMES_2_PRO,
-
+    COMMON_CHAT_FORMAT_COUNT, // Not a format, just the # formats
 };
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index ff254fa09..fbe16c57d 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -340,7 +340,7 @@ struct server_task {
         {
             auto it = data.find("chat_format");
             if (it != data.end()) {
-                params.oaicompat_chat_format = static_cast<common_chat_format>(it->get<int>());
+                params.oaicompat_chat_format = static_cast<common_chat_format>(it->get<int>());
             } else {
                 params.oaicompat_chat_format = defaults.oaicompat_chat_format;
             }
diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index c589d6d40..157df6a1e 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -643,7 +643,7 @@ static json oaicompat_completion_params_parse(
     inputs.json_schema = json_value(llama_params, "json_schema", json::object());
 
     auto chat_params = common_chat_params_init(tmpl, inputs);
-    llama_params["chat_format"] = static_cast<int>(chat_params.format);
+    llama_params["chat_format"] = static_cast<int>(chat_params.format);
     llama_params["prompt"] = chat_params.prompt;
     llama_params["grammar"] = chat_params.grammar;
     llama_params["grammar_lazy"] = chat_params.grammar_lazy;
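
COMMON_CHAT_FORMAT_COUNT, added in the chat.hpp hunk, is a trailing sentinel enumerator, so it always equals the number of real formats. Because server.cpp and utils.hpp round-trip the format through JSON as a plain integer (static_cast<int> on write, static_cast<common_chat_format> on read), the sentinel also gives a natural upper bound for validating the integer before casting it back. The sketch below illustrates that idea; the enum is trimmed to a few members for brevity, and chat_format_from_int is a hypothetical helper invented for illustration, not part of this patch or of llama.cpp.

    #include <stdexcept>
    #include <string>

    // Trimmed mirror of the enum from common/chat.hpp (illustrative only).
    enum common_chat_format {
        COMMON_CHAT_FORMAT_CONTENT_ONLY,
        COMMON_CHAT_FORMAT_GENERIC,
        COMMON_CHAT_FORMAT_HERMES_2_PRO,
        COMMON_CHAT_FORMAT_COUNT, // Not a format, just the # formats
    };

    // Hypothetical helper: range-check an int read back from JSON before
    // casting, using the COUNT sentinel as the exclusive upper bound.
    static common_chat_format chat_format_from_int(int value) {
        if (value < 0 || value >= COMMON_CHAT_FORMAT_COUNT) {
            throw std::runtime_error("invalid chat_format: " + std::to_string(value));
        }
        return static_cast<common_chat_format>(value);
    }

On the read side, params.oaicompat_chat_format = chat_format_from_int(it->get<int>()) would then reject an out-of-range value instead of silently producing an invalid enum.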