From 79fd89a62b5a894331dd8d68fde490ef3c14757b Mon Sep 17 00:00:00 2001 From: Yingbei Date: Fri, 22 Mar 2024 15:41:44 -0700 Subject: [PATCH] minor fix to address tool_calls output format --- examples/server/utils.hpp | 7 +++++-- llama.cpp | 1 + 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp index d3727fc6c..f12b5307c 100644 --- a/examples/server/utils.hpp +++ b/examples/server/utils.hpp @@ -341,7 +341,9 @@ static std::string rubra_format_function_call_str(const std::vector & func std::string final_str = "You have access to the following tools:\n"; json type_mapping = { {"string", "str"}, + {"integer", "int"}, {"number", "float"}, + {"float", "float"}, {"object", "Dict[str, Any]"}, {"array", "List"}, {"boolean", "bool"}, @@ -592,10 +594,11 @@ static json format_final_response_oaicompat(const json & request, json result, c json tool_call; tool_call["id"] = pc["id"]; tool_call["type"] = "function"; - tool_call["function"] = json{{ + tool_call["function"] = json{ {"name" , pc["name"]}, {"arguments" , pc["kwargs"].dump()}, - }}; + }; + printf("format_final_response_oaicompat: tool_call: %s\n", tool_call.dump().c_str()); oai_format_tool_calls.push_back(tool_call); } choices = json::array({json{{"finish_reason", finish_reason}, diff --git a/llama.cpp b/llama.cpp index ba53d05ce..ed46216b5 100644 --- a/llama.cpp +++ b/llama.cpp @@ -14518,6 +14518,7 @@ static int32_t llama_chat_apply_template_internal( // construct the prompt bool is_inside_turn = true; // skip BOS at the beginning // ss << "[INST] "; + for (auto message : chat) { std::string content = strip_message ? trim(message->content) : message->content; std::string role(message->role);