diff --git a/examples/server/functionary-test.cpp b/examples/server/functionary-test.cpp index f3eb08089..15763fcc1 100644 --- a/examples/server/functionary-test.cpp +++ b/examples/server/functionary-test.cpp @@ -111,4 +111,4 @@ int main() { std::cout << "\n" << llama_functionary::convert_response_to_oai_choices(test_response) << "\n"; return 0; -} \ No newline at end of file +} diff --git a/examples/server/functionary.hpp b/examples/server/functionary.hpp index e124f79b2..338d8b1f9 100644 --- a/examples/server/functionary.hpp +++ b/examples/server/functionary.hpp @@ -22,6 +22,7 @@ using json = nlohmann::json; #define FUNCTIONARY_RECIP_ALL "all" #define FUNCTIONARY_RECIP_NONE "no-tool-call" +namespace llama_functionary { template static T json_value(const json &body, const std::string &key, const T &default_value) @@ -55,9 +56,6 @@ inline std::vector str_split(std::string str, const std::string & d return output; } - -namespace llama_functionary { - typedef struct message { std::string from; // can be "system", "user", "assistant" or name of function std::string recipient = FUNCTIONARY_RECIP_ALL; @@ -273,7 +271,7 @@ inline json convert_response_to_oai_choices(const std::string & content) { } // build final response json choices = json::array(); - // TODO: technically, functionary can reponse both text + tool_call in one shot. But for some reasons, the original implementation of OpenAI only return either ofthem, not both. + // TODO: technically, functionary can respond with both text + tool_call in one shot. But for some reason, the original OpenAI implementation returns only one of them, not both.
if (tool_calls.size() > 0) { choices.push_back(json{ {"index", 0}, @@ -297,4 +295,4 @@ inline json convert_response_to_oai_choices(const std::string & content) { return choices; } -} // namespace llama_functionary \ No newline at end of file +} // namespace llama_functionary diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp index 6d668f316..1a2792702 100644 --- a/examples/server/oai.hpp +++ b/examples/server/oai.hpp @@ -9,6 +9,7 @@ #include "json.hpp" #include "utils.hpp" +#include "functionary.hpp" #define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613"