tool-call: fix Makefile

Olivier Chafik 2024-10-02 00:06:42 +01:00
parent c36a196f53
commit c76b14501e
2 changed files with 10 additions and 7 deletions

Makefile

@@ -55,7 +55,6 @@ TEST_TARGETS = \
tests/test-grammar-parser \
tests/test-json-schema-to-grammar \
tests/test-minja \
tests/test-tool-call \
tests/test-llama-grammar \
tests/test-log \
tests/test-model-load-cancel \
@@ -64,6 +63,7 @@ TEST_TARGETS = \
tests/test-quantize-perf \
tests/test-rope \
tests/test-sampling \
tests/test-tool-call \
tests/test-tokenizer-0 \
tests/test-tokenizer-1-bpe \
tests/test-tokenizer-1-spm
@@ -934,7 +934,6 @@ OBJ_LLAMA = \
OBJ_COMMON = \
common/common.o \
common/chat-template.o \
common/arg.o \
common/log.o \
common/console.o \
@@ -1171,12 +1170,14 @@ $(LIB_LLAMA_S): \
common/common.o: \
common/common.cpp \
common/common.h \
common/chat-template.cpp \
common/chat-template.h \
common/chat-template.hpp \
common/console.h \
common/sampling.h \
common/json.hpp \
common/json-schema-to-grammar.h \
common/minja.hpp \
common/tool-call.cpp \
common/tool-call.h \
include/llama.h
$(CXX) $(CXXFLAGS) -c $< -o $@
@@ -1468,9 +1469,11 @@ llama-server: \
examples/server/prompt-formats.js.hpp \
examples/server/json-schema-to-grammar.mjs.hpp \
examples/server/loading.html.hpp \
common/chat-template.h \
common/chat-template.hpp \
common/json.hpp \
common/minja.hpp \
common/stb_image.h \
common/tool-call.h \
$(OBJ_ALL)
$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
$(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2)
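
For context on the llama-server rule above: headers such as common/chat-template.hpp and common/tool-call.h are listed as prerequisites so the binary is rebuilt when they change, while $(filter-out %.h %.hpp $<,$^) keeps them out of the actual compile/link command. A minimal sketch of that pattern follows; the target name and header paths are illustrative, not taken from the repository (recipe lines must be tab-indented):

# Sketch only: 'my-example' and the header paths below are made up.
# Headers appear as prerequisites so edits to them trigger a relink,
# but filter-out drops %.h/%.hpp (and the first source, $<) from the link inputs.
my-example: examples/my-example/main.cpp \
	common/some-header.hpp \
	common/other-header.h \
	$(OBJ_ALL)
	$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
	$(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS)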

tests/test-tool-call.cpp

@@ -243,14 +243,14 @@ static void test_parsing() {
"{\"name\": \"unknown_function\", \"arguments\": {\"arg1\": 1}}", json::array());
}
void test_tool_call_style(const std::string & template_file, llama_tool_call_style expected) {
static void test_tool_call_style(const std::string & template_file, llama_tool_call_style expected) {
const minja::chat_template tmpl(read_file(template_file), "<s>", "</s>");
auto tool_call_style = llama_tool_call_style_detect(tmpl);
std::cout << "# Testing tool call style of: " << template_file << std::endl << std::flush;
assert_equals(expected, tool_call_style);
}
void test_tool_call_style_detection() {
static void test_tool_call_style_detection() {
test_tool_call_style("tests/chat/templates/meetkai-functionary-medium-v3.1.jinja", FunctionaryV3Llama31);
test_tool_call_style("tests/chat/templates/meetkai-functionary-medium-v3.2.jinja", FunctionaryV3Llama3);
test_tool_call_style("tests/chat/templates/meta-llama-Meta-Llama-3.1-8B-Instruct.jinja", Llama31);