This commit is contained in:
Xuan Son Nguyen 2024-08-30 21:51:12 +02:00
parent 5f06d37baf
commit d25cd7f9e4
2 changed files with 42 additions and 47 deletions

View file

@ -3069,9 +3069,14 @@ int main(int argc, char ** argv) {
} }
json body = json::parse(req.body); json body = json::parse(req.body);
if (body.contains("tools") && ctx_server.tool_format != LLAMA_TOOL_FORMAT_NOT_SUPPORTED) { if (body.contains("tools")) {
body["prompt"] = format_chat_with_tool(ctx_server.tool_format, body.at("messages"), body.at("tools")); if (ctx_server.tool_format != LLAMA_TOOL_FORMAT_NOT_SUPPORTED) {
body.erase(body.find("tools")); body["prompt"] = format_chat_with_tool(ctx_server.tool_format, body.at("messages"), body.at("tools"));
body.erase(body.find("tools"));
} else {
res_error(res, format_error_response("This server does not support tool calls. Start it with `--tool-calls`", ERROR_TYPE_NOT_SUPPORTED));
return;
}
} }
json data = oaicompat_completion_params_parse(ctx_server.model, body, params.chat_template); json data = oaicompat_completion_params_parse(ctx_server.model, body, params.chat_template);

View file

@ -499,7 +499,15 @@ static std::vector<json> format_partial_response_oaicompat(json result, const st
finish_reason = "length"; finish_reason = "length";
} }
std::time_t t = std::time(0); auto wrap_choices = [&completion_id, &modelname](json choices) -> json {
return json{
{"choices", choices},
{"created", std::time(0)},
{"id", completion_id},
{"model", modelname},
{"object", "chat.completion.chunk"}
};
};
json choices; json choices;
json delta = has_tool_calls json delta = has_tool_calls
@ -519,22 +527,14 @@ static std::vector<json> format_partial_response_oaicompat(json result, const st
{"delta", json::object()}}}); {"delta", json::object()}}});
if (has_tool_calls) { if (has_tool_calls) {
// tool call must be sent as two updates // tool call must be sent as two updates
json initial_ret = json{{"choices", json::array({json{ json initial_ret = wrap_choices(json::array({
{"finish_reason", nullptr}, json{
{"index", 0}, {"finish_reason", nullptr},
{"delta", delta}}})}, {"index", 0},
{"created", t}, {"delta", delta},
{"id", completion_id}, }
{"model", modelname}, }));
{"object", "chat.completion.chunk"}}; json second_ret = wrap_choices(choices);
json second_ret = json{
{"choices", choices},
{"created", t},
{"id", completion_id},
{"model", modelname},
{"object", "chat.completion.chunk"}};
return std::vector<json>({initial_ret, second_ret}); return std::vector<json>({initial_ret, second_ret});
} }
} else { } else {
@ -545,26 +545,22 @@ static std::vector<json> format_partial_response_oaicompat(json result, const st
{"delta", json{{"role", "assistant"}}}}}); {"delta", json{{"role", "assistant"}}}}});
} else { } else {
// We have to send this as two updates to conform to openai behavior // We have to send this as two updates to conform to openai behavior
json initial_ret = json{{"choices", json::array({json{ json initial_ret = wrap_choices(json::array({
{"finish_reason", nullptr}, json{
{"index", 0}, {"finish_reason", nullptr},
{"delta", json{ {"index", 0},
{"role", "assistant"} {"delta", json{
}}}})}, {"role", "assistant"},
{"created", t}, }},
{"id", completion_id}, }
{"model", modelname}, }));
{"object", "chat.completion.chunk"}}; json second_ret = wrap_choices(json::array({
json{
json second_ret = json{ {"finish_reason", nullptr},
{"choices", json::array({json{{"finish_reason", nullptr}, {"index", 0},
{"index", 0}, {"delta", delta},
{"delta", delta}}})}, }
{"created", t}, }));
{"id", completion_id},
{"model", modelname},
{"object", "chat.completion.chunk"}};
return std::vector<json>({initial_ret, second_ret}); return std::vector<json>({initial_ret, second_ret});
} }
} else { } else {
@ -582,13 +578,7 @@ static std::vector<json> format_partial_response_oaicompat(json result, const st
} }
} }
json ret = json { json ret = wrap_choices(choices);
{"choices", choices},
{"created", t},
{"id", completion_id},
{"model", modelname},
{"object", "chat.completion.chunk"}
};
if (!finish_reason.empty()) { if (!finish_reason.empty()) {
int num_tokens_predicted = json_value(result, "tokens_predicted", 0); int num_tokens_predicted = json_value(result, "tokens_predicted", 0);
int num_prompt_tokens = json_value(result, "tokens_evaluated", 0); int num_prompt_tokens = json_value(result, "tokens_evaluated", 0);