From f77b79d1c5c2b309150acfe2161d7446d348a9ec Mon Sep 17 00:00:00 2001
From: Te993 <3923106166@qq.com>
Date: Mon, 6 Jan 2025 18:06:11 +0800
Subject: [PATCH] omni vlm add streaming

---
 examples/omni-vlm/omni-vlm-wrapper.cpp | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/examples/omni-vlm/omni-vlm-wrapper.cpp b/examples/omni-vlm/omni-vlm-wrapper.cpp
index 60d6e9d8c..ea3326294 100644
--- a/examples/omni-vlm/omni-vlm-wrapper.cpp
+++ b/examples/omni-vlm/omni-vlm-wrapper.cpp
@@ -32,6 +32,9 @@ static struct llama_model* model;
 static struct omnivlm_context* ctx_omnivlm;
 static std::unique_ptr<omni_streaming_sample> g_oss = nullptr;
 
+static bool eval_id(struct llama_context * ctx_llama, int id, int * n_past);
+static void omnivlm_free(struct omnivlm_context * ctx_omnivlm);
+
 struct omni_streaming_sample {
     struct common_sampler * ctx_sampling_;
     std::string image_;
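
Note (not part of the patch above): a minimal sketch of how the forward-declared eval_id() might be driven by the streaming object that omni_streaming_sample represents. It is meant to read as if it sat inside omni-vlm-wrapper.cpp, so the file's existing includes and types apply; the helper name stream_next_token and the use of the llama.cpp common_sampler_sample()/common_sampler_accept() calls are assumptions for illustration, not taken from this diff.

// Illustrative sketch only -- not part of the patch. Assumes the llama.cpp
// "common" sampling API and the struct fields shown in the hunk above.
static int32_t stream_next_token(omni_streaming_sample & s,
                                 struct llama_context * ctx_llama,
                                 int * n_past) {
    // sample the next token with the sampler the streaming object carries
    const llama_token id = common_sampler_sample(s.ctx_sampling_, ctx_llama, -1);
    common_sampler_accept(s.ctx_sampling_, id, /*accept_grammar=*/true);

    // feed the token back through the forward-declared eval_id() so the
    // context advances before the next streamed token is sampled
    if (!eval_id(ctx_llama, id, n_past)) {
        return -1;  // evaluation failed; caller should stop streaming
    }
    return id;      // caller detokenizes this id and emits the text piece
}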