pass vocab to common_chat_params_init

Author: ochafik (2025-02-03 01:16:02 +00:00)
Parent: d3b60b8ad8
Commit: 87de852b7f
4 changed files with 11 additions and 10 deletions


@@ -1907,9 +1907,9 @@ struct server_context {
         }});
         GGML_ASSERT(templates.template_default);
         try {
-            common_chat_params_init(*templates.template_default, inputs);
+            common_chat_params_init(*templates.template_default, inputs, vocab);
             if (templates.template_tool_use) {
-                common_chat_params_init(*templates.template_tool_use, inputs);
+                common_chat_params_init(*templates.template_tool_use, inputs, vocab);
             }
             return true;
         } catch (const std::exception & e) {
@@ -4048,7 +4048,7 @@ int main(int argc, char ** argv) {
         }
         auto body = json::parse(req.body);
-        json data = oaicompat_completion_params_parse(body, params.use_jinja, ctx_server.chat_templates);
+        json data = oaicompat_completion_params_parse(body, params.use_jinja, ctx_server.chat_templates, llama_model_get_vocab(ctx_server.model));
         return handle_completions_impl(
             SERVER_TASK_TYPE_COMPLETION,
@@ -4061,7 +4061,7 @@ int main(int argc, char ** argv) {
     // same with handle_chat_completions, but without inference part
     const auto handle_apply_template = [&ctx_server, &params, &res_ok](const httplib::Request & req, httplib::Response & res) {
         auto body = json::parse(req.body);
-        json data = oaicompat_completion_params_parse(body, params.use_jinja, ctx_server.chat_templates);
+        json data = oaicompat_completion_params_parse(body, params.use_jinja, ctx_server.chat_templates, llama_model_get_vocab(ctx_server.model));
         res_ok(res, {{ "prompt", std::move(data.at("prompt")) }});
     };
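
For reference, the call sites above imply that both helpers now accept the model vocabulary as an extra argument. The following is a minimal sketch of what the adjusted declarations could look like, not the actual header diff: only the added vocab parameter is confirmed by the hunks, while the other parameter and return types (common_chat_template, common_chat_inputs, common_chat_params, common_chat_templates, nlohmann json) are assumptions based on how the functions are used here.

    // Hedged sketch of the signature change implied by this commit's call sites.
    // Only the trailing `vocab` parameter is confirmed by the diff; everything
    // else is an assumption about the surrounding llama.cpp types.
    #include <nlohmann/json.hpp>
    using json = nlohmann::ordered_json;

    struct llama_vocab;             // opaque type returned by llama_model_get_vocab()
    struct common_chat_template;    // assumed existing type
    struct common_chat_inputs;      // assumed existing type
    struct common_chat_params;      // assumed existing type
    struct common_chat_templates;   // assumed existing type

    // Before: common_chat_params_init(tmpl, inputs)
    // After:  the vocabulary is threaded in as a third argument.
    common_chat_params common_chat_params_init(
        const common_chat_template & tmpl,
        const common_chat_inputs   & inputs,
        const llama_vocab          * vocab);

    // Before: oaicompat_completion_params_parse(body, use_jinja, chat_templates)
    // After:  the parser forwards the vocab obtained from the server's model.
    json oaicompat_completion_params_parse(
        const json                  & body,
        bool                          use_jinja,
        const common_chat_templates & chat_templates,
        const llama_vocab           * vocab);

With this shape, server code that already holds a model handle can obtain the vocab via llama_model_get_vocab(ctx_server.model) and pass it straight through, which is exactly the pattern visible in the handle_completions and handle_apply_template hunks above.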