From ef7eb339372c87d58bc47d2b63a70d02c5150a93 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov <ggerganov@gmail.com>
Date: Tue, 5 Mar 2024 11:51:43 +0200
Subject: [PATCH] server : remove llava/clip objects from build

---
 Makefile                       |  5 ++---
 examples/server/CMakeLists.txt |  2 +-
 examples/server/oai.hpp        | 10 +++-------
 examples/server/server.cpp     |  1 +
 examples/server/utils.hpp      |  1 -
 5 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/Makefile b/Makefile
index 4f26c0463..c37d61da8 100644
--- a/Makefile
+++ b/Makefile
@@ -724,10 +724,9 @@ save-load-state: examples/save-load-state/save-load-state.cpp ggml.o llama.o $(C
 	$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
 	$(CXX) $(CXXFLAGS) $(filter-out %.h $<,$^) $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS)
 
-server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp examples/llava/clip.cpp examples/llava/clip.h examples/llava/llava.h examples/llava/llava.cpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS)
+server: examples/server/server.cpp examples/server/oai.hpp examples/server/utils.hpp examples/server/httplib.h examples/server/json.hpp examples/server/index.html.hpp examples/server/index.js.hpp examples/server/completion.js.hpp common/stb_image.h ggml.o llama.o $(COMMON_DEPS) grammar-parser.o $(OBJS)
 	$(CXX) $(CXXFLAGS) -c $< -o $(call GET_OBJ_FILE, $<)
-	$(CXX) $(CXXFLAGS) -c examples/llava/clip.cpp -o $(call GET_OBJ_FILE, examples/llava/clip.cpp) -Wno-cast-qual
-	$(CXX) $(CXXFLAGS) -Iexamples/server $(filter-out %.h %.hpp $< examples/llava/clip.cpp,$^) $(call GET_OBJ_FILE, $<) $(call GET_OBJ_FILE, examples/llava/clip.cpp) -o $@ $(LDFLAGS) $(LWINSOCK2)
+	$(CXX) $(CXXFLAGS) $(filter-out %.h %.hpp $<,$^) -Iexamples/server $(call GET_OBJ_FILE, $<) -o $@ $(LDFLAGS) $(LWINSOCK2)
 
 gguf: examples/gguf/gguf.cpp ggml.o $(OBJS)
 	$(CXX) $(CXXFLAGS) -c 
$< -o $(call GET_OBJ_FILE, $<)

diff --git a/examples/server/CMakeLists.txt b/examples/server/CMakeLists.txt
index cc13b2d63..1fab67724 100644
--- a/examples/server/CMakeLists.txt
+++ b/examples/server/CMakeLists.txt
@@ -6,7 +6,7 @@ install(TARGETS ${TARGET} RUNTIME)
 target_compile_definitions(${TARGET} PRIVATE
     SERVER_VERBOSE=$<BOOL:${SERVER_VERBOSE}>
 )
-target_link_libraries(${TARGET} PRIVATE common llava ${CMAKE_THREAD_LIBS_INIT})
+target_link_libraries(${TARGET} PRIVATE common ${CMAKE_THREAD_LIBS_INIT})
 if (WIN32)
     TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32)
 endif()
diff --git a/examples/server/oai.hpp b/examples/server/oai.hpp
index ff4ad6994..dddc26b69 100644
--- a/examples/server/oai.hpp
+++ b/examples/server/oai.hpp
@@ -1,15 +1,11 @@
 #pragma once
 
-#include <string>
-#include <vector>
-#include <set>
-#include <mutex>
-#include <condition_variable>
-#include <unordered_map>
-
 #include "json.hpp"
 #include "utils.hpp"
 
+#include <string>
+#include <vector>
+
 #define DEFAULT_OAICOMPAT_MODEL "gpt-3.5-turbo-0613"
 
 using json = nlohmann::json;
diff --git a/examples/server/server.cpp b/examples/server/server.cpp
index 517d20f5b..e55ebedc7 100644
--- a/examples/server/server.cpp
+++ b/examples/server/server.cpp
@@ -356,6 +356,7 @@ struct llama_server_context {
 
     void validate_model_chat_template(server_params & sparams) {
         llama_chat_message chat[] = {{"user", "test"}};
+        std::vector<char> buf(1);
 
         int res = llama_chat_apply_template(model, nullptr, chat, 1, true, buf.data(), buf.size());
 
         if (res < 0) {
diff --git a/examples/server/utils.hpp b/examples/server/utils.hpp
index d7cba272e..9760d035c 100644
--- a/examples/server/utils.hpp
+++ b/examples/server/utils.hpp
@@ -10,7 +10,6 @@
 #include <mutex>
 #include <condition_variable>
 #include <unordered_map>
-#include <random>
 
 using json = nlohmann::json;