From 3479f516ea55c9a278986e9a300a163979be4177 Mon Sep 17 00:00:00 2001
From: Yicheng Qian
Date: Fri, 22 Nov 2024 14:01:42 -0800
Subject: [PATCH] update prompt template in wrapper

---
 examples/omni-vlm/CMakeLists.txt       | 9 ++++++++-
 examples/omni-vlm/omni-vlm-wrapper.cpp | 2 +-
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/examples/omni-vlm/CMakeLists.txt b/examples/omni-vlm/CMakeLists.txt
index b6d41b050..4e9413bfc 100644
--- a/examples/omni-vlm/CMakeLists.txt
+++ b/examples/omni-vlm/CMakeLists.txt
@@ -40,7 +40,14 @@ target_compile_features(${TARGET} PRIVATE cxx_std_11)
 #=== for omni-vlm-wrapper
 add_library(omni_vlm_wrapper_shared SHARED omni-vlm-wrapper.cpp $)
 target_link_libraries(omni_vlm_wrapper_shared PRIVATE common ggml_llama llama ${CMAKE_THREAD_LIBS_INIT})
-install(TARGETS omni_vlm_wrapper_shared LIBRARY)
+# For Nexa SDK library installation
+set_target_properties(omni_vlm_wrapper_shared PROPERTIES
+    PUBLIC_HEADER "omni-vlm-wrapper.h"
+    POSITION_INDEPENDENT_CODE ON
+    OUTPUT_NAME "omni_vlm_wrapper_shared")
+install(TARGETS omni_vlm_wrapper_shared
+    LIBRARY
+    PUBLIC_HEADER DESTINATION include)
 
 # set(TARGET omni-vlm-wrapper-cli)
 # add_executable(${TARGET} omni-vlm-wrapper-cli.cpp)
diff --git a/examples/omni-vlm/omni-vlm-wrapper.cpp b/examples/omni-vlm/omni-vlm-wrapper.cpp
index ba0749d06..00d50cf15 100644
--- a/examples/omni-vlm/omni-vlm-wrapper.cpp
+++ b/examples/omni-vlm/omni-vlm-wrapper.cpp
@@ -255,7 +255,7 @@ const char* omnivlm_inference(const char *prompt, const char *imag_path) {
     if (params.omni_vlm_version == "vlm-81-ocr") {
         params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n <|ocr_start|><|vision_start|><|image_pad|><|vision_end|><|ocr_end|><|im_end|>";
     } else if (params.omni_vlm_version == "vlm-81-instruct" || params.omni_vlm_version == "nano-vlm-instruct") {
-        params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n" + params.prompt + "\n<|vision_start|><|image_pad|><|vision_end|><|im_end|>";
+        params.prompt = "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. You are a helpful assistant.<|im_end|>\n<|im_start|>user\n\n<|vision_start|><|image_pad|><|vision_end|>" + params.prompt + "<|im_end|>";
     } else {
         LOG_TEE("%s : error: you set wrong vlm version info:'%s'.\n", __func__, params.omni_vlm_version.c_str());
         throw std::runtime_error("You set wrong vlm_version info strings.");
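
Note on the CMakeLists.txt hunk: installing omni-vlm-wrapper.h as a
PUBLIC_HEADER lets out-of-tree code build against the installed shared
library. A minimal consumer sketch follows; the omnivlm_inference
signature is taken from the second hunk, while the C linkage, the link
flag, and the assumption that initialization happens elsewhere are
guesses, not verified against the installed header.

// consumer.cpp -- hypothetical out-of-tree user of the installed wrapper.
// Build sketch (assumed): g++ consumer.cpp -lomni_vlm_wrapper_shared -o consumer
#include <cstdio>

// Redeclared here so the sketch is self-contained; the real declaration
// lives in the installed omni-vlm-wrapper.h. C linkage is an assumption.
extern "C" const char* omnivlm_inference(const char* prompt, const char* imag_path);

int main() {
    // Hypothetical inputs; model loading/initialization is assumed to have
    // been done elsewhere through the wrapper's init entry point.
    const char* out = omnivlm_inference("Describe this image.", "sample.png");
    std::printf("%s\n", out ? out : "(null)");
    return 0;
}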
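
Note on the omni-vlm-wrapper.cpp hunk: for the instruct variants, the
<|vision_start|><|image_pad|><|vision_end|> block now precedes the user
text instead of following it. A standalone sketch of the resulting string
assembly; the user_text variable is hypothetical (in the patch the text
arrives via params.prompt).

#include <iostream>
#include <string>

int main() {
    std::string user_text = "Describe this image.";  // hypothetical input
    // New order from the hunk: image tokens first, then the user text.
    std::string prompt =
        "<|im_start|>system\nYou are Nano-Omni-VLM, created by Nexa AI. "
        "You are a helpful assistant.<|im_end|>\n"
        "<|im_start|>user\n\n"
        "<|vision_start|><|image_pad|><|vision_end|>"
        + user_text + "<|im_end|>";
    std::cout << prompt << std::endl;
    return 0;
}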