fix gcc build

This commit is contained in:
ochafik 2024-09-26 06:50:51 +01:00
parent 749a21c67a
commit 3d2650ce65
8 changed files with 24 additions and 29 deletions

View file

@@ -471,16 +471,17 @@ std::string llama_detokenize(
// Chat template utils // Chat template utils
// //
struct llama_chat_msg_tool_call {
std::string name;
std::string arguments;
};
// same as llama_chat_message, but uses std::string and std::vector // same as llama_chat_message, but uses std::string and std::vector
struct llama_chat_msg { struct llama_chat_msg {
std::string role; std::string role;
std::string content; std::string content;
std::string tool; std::string tool;
struct llama_tool_call { std::vector<struct llama_chat_msg_tool_call> tool_calls;
std::string name;
std::string arguments;
};
std::vector<llama_tool_call> tool_calls;
}; };
// Check if the template supplied via "--chat-template" is supported or not. Returns true if it's valid // Check if the template supplied via "--chat-template" is supported or not. Returns true if it's valid
@@ -571,8 +572,8 @@ private:
// The AhoCorasick algorithm allows efficient string matching with multiple patterns. // The AhoCorasick algorithm allows efficient string matching with multiple patterns.
// See https://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_algorithm // See https://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_algorithm
struct TrieNode { struct TrieNode {
std::unordered_map<char, TrieNode> children; std::unordered_map<char, struct TrieNode> children;
TrieNode* fail = nullptr; struct TrieNode* fail = nullptr;
int output = -1; int output = -1;
size_t depth = 0; size_t depth = 0;

View file

@@ -1041,15 +1041,15 @@ std::string json_schema_to_grammar(const json & schema) {
} }
std::string build_grammar(const std::function<void(const llama_grammar_builder &)> & cb) { std::string build_grammar(const std::function<void(const llama_grammar_builder &)> & cb) {
SchemaConverter converter([&](const std::string & name) { return json(); }, /* dotall= */ false); SchemaConverter converter([&](const std::string &) { return json(); }, /* dotall= */ false);
llama_grammar_builder builder { llama_grammar_builder builder {
.add_rule = [&](const std::string & name, const std::string & rule) { /* .add_rule = */ [&](const std::string & name, const std::string & rule) {
return converter.add_rule(name, rule); return converter.add_rule(name, rule);
}, },
.add_schema = [&](const std::string & name, const nlohmann::ordered_json & schema) { /* .add_schema = */ [&](const std::string & name, const nlohmann::ordered_json & schema) {
return converter.visit(schema, name); return converter.visit(schema, name);
}, },
.resolve_refs = [&](nlohmann::ordered_json & schema) { /* .resolve_refs = */ [&](nlohmann::ordered_json & schema) {
converter.resolve_refs(schema, ""); converter.resolve_refs(schema, "");
} }
}; };

View file

@@ -2160,7 +2160,7 @@ private:
throw unterminated(**start); throw unterminated(**start);
} }
children.emplace_back(nonstd_make_unique<MacroNode>(token->location, std::move(macro_token->name), std::move(macro_token->params), std::move(body))); children.emplace_back(nonstd_make_unique<MacroNode>(token->location, std::move(macro_token->name), std::move(macro_token->params), std::move(body)));
} else if (auto comment_token = dynamic_cast<CommentTemplateToken*>(token.get())) { } else if (dynamic_cast<CommentTemplateToken*>(token.get())) {
// Ignore comments // Ignore comments
} else if (dynamic_cast<EndForTemplateToken*>(token.get()) } else if (dynamic_cast<EndForTemplateToken*>(token.get())
|| dynamic_cast<EndSetTemplateToken*>(token.get()) || dynamic_cast<EndSetTemplateToken*>(token.get())

View file

@@ -41,8 +41,7 @@ static bool parse_json(std::string::const_iterator & it, const std::string::cons
json_error_locator() : position(0), found_error(false) {} json_error_locator() : position(0), found_error(false) {}
bool parse_error(std::size_t position, const std::string & last_token, const json::exception & ex) override { bool parse_error(std::size_t position, const std::string &, const json::exception &) override {
// LOG_WARNING("JSON error (Expected)", {{"position", position}, {"last_token", last_token}, {"error", ex.what()}});
this->position = position - 1; this->position = position - 1;
this->found_error = true; this->found_error = true;
return false; return false;
@@ -70,13 +69,11 @@ static bool parse_json(std::string::const_iterator & it, const std::string::cons
temptative_end = end; temptative_end = end;
} }
std::string json_sub {it, temptative_end}; std::string json_sub {it, temptative_end};
// LOG_WARNING("Parsing json", {{"json_sub", json_sub}});
try { try {
out = json::parse(json_sub); out = json::parse(json_sub);
it = temptative_end; it = temptative_end;
return true; return true;
} catch (const std::exception & e) { } catch (const std::exception &) {
// LOG_WARNING("Failed to parse tool call", {{"json_sub", json_sub}, {"error", e.what()}});
return false; return false;
} }
} }

View file

@@ -1,18 +1,14 @@
#pragma once #pragma once
#include "ggml.h" #include "ggml.h"
#include "common.h"
// Change JSON_ASSERT from assert() to GGML_ASSERT: // Change JSON_ASSERT from assert() to GGML_ASSERT:
#define JSON_ASSERT GGML_ASSERT #define JSON_ASSERT GGML_ASSERT
#include "json.hpp" #include "json.hpp"
struct llama_tool_call {
std::string name;
std::string arguments;
};
struct llama_tool_calls { struct llama_tool_calls {
std::string content; std::string content;
std::vector<llama_tool_call> tool_calls; std::vector<llama_chat_msg_tool_call> tool_calls;
}; };
struct llama_tool_call_handler { struct llama_tool_call_handler {

View file

@@ -662,7 +662,7 @@ struct server_context {
bool validate_model_chat_template(bool use_jinja) const { bool validate_model_chat_template(bool use_jinja) const {
llama_chat_message chat[] = {{"user", "test"}}; llama_chat_message chat[] = {{"user", "test"}};
const int res = llama_chat_apply_template(model, nullptr, chat, 1, true, nullptr, 0, use_jinja); const int res = llama_chat_apply_template(model, nullptr, chat, 1, true, nullptr, 0, use_jinja, nullptr, nullptr, nullptr);
return res > 0; return res > 0;
} }

View file

@ -378,17 +378,17 @@ extern "C" {
// used in chat template // used in chat template
typedef struct llama_tool_call { typedef struct llama_chat_message_tool_call {
const char * name; const char * name;
const char * arguments; const char * arguments;
} llama_tool_call; } llama_chat_message_tool_call;
typedef struct llama_chat_message { typedef struct llama_chat_message {
const char * role; const char * role;
const char * content; const char * content;
const char * tool; const char * tool;
const llama_tool_call * tool_calls; const llama_chat_message_tool_call * tool_calls;
uint32_t n_tool_calls; uint32_t n_tool_calls;
} llama_chat_message; } llama_chat_message;

View file

@@ -21081,8 +21081,9 @@ static int32_t llama_chat_apply_template_internal(
context->set("tools", tools_val); context->set("tools", tools_val);
} }
auto tmpl_root = minja::Parser::parse(tmpl, { auto tmpl_root = minja::Parser::parse(tmpl, {
.trim_blocks = true, /* .trim_blocks = */ true,
.lstrip_blocks = true, /* .lstrip_blocks = */ true,
/* .keep_trailing_newline = */ false,
}); });
try { try {
dest = tmpl_root->render(context); dest = tmpl_root->render(context);