From a4747b2edb90b9fbf8cb7c3108ba973fc79d7152 Mon Sep 17 00:00:00 2001
From: Zack Zhiyuan Li
Date: Mon, 4 Nov 2024 04:40:41 +0000
Subject: [PATCH 1/5] fix error on windows

qwen2-audio/whisper.cpp:9935:38: error: '_O_BINARY' was not declared in this scope
---
 examples/qwen2-audio/whisper.cpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/examples/qwen2-audio/whisper.cpp b/examples/qwen2-audio/whisper.cpp
index 7db2c24ea..6da9d268d 100644
--- a/examples/qwen2-audio/whisper.cpp
+++ b/examples/qwen2-audio/whisper.cpp
@@ -60,6 +60,11 @@
 #include
 #include

+#ifdef _WIN32
+#include <io.h>    // for _setmode
+#include <fcntl.h> // for _O_BINARY
+#endif
+
 // third-party utilities
 // use your favorite implementations
 #define DR_WAV_IMPLEMENTATION

From 6f1ed6e5cb1e8003b1b7146bc5aaf1e525bf9096 Mon Sep 17 00:00:00 2001
From: Zack Zhiyuan Li
Date: Mon, 4 Nov 2024 04:54:51 +0000
Subject: [PATCH 2/5] Adding #include <io.h> & <fcntl.h>

---
 examples/nexa-omni-audio/whisper.cpp | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/examples/nexa-omni-audio/whisper.cpp b/examples/nexa-omni-audio/whisper.cpp
index 7db2c24ea..f7b585237 100644
--- a/examples/nexa-omni-audio/whisper.cpp
+++ b/examples/nexa-omni-audio/whisper.cpp
@@ -60,6 +60,11 @@
 #include
 #include

+#ifdef _WIN32
+#include <io.h>
+#include <fcntl.h>
+#endif
+
 // third-party utilities
 // use your favorite implementations
 #define DR_WAV_IMPLEMENTATION

From 141968108994905dc481863b75e0837cb693f5e3 Mon Sep 17 00:00:00 2001
From: Zack Zhiyuan Li
Date: Mon, 4 Nov 2024 05:45:52 +0000
Subject: [PATCH 3/5] disable for MSC_VER

---
 common/common-nexa.h | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/common/common-nexa.h b/common/common-nexa.h
index 1135eae57..9eb8848a8 100644
--- a/common/common-nexa.h
+++ b/common/common-nexa.h
@@ -11,8 +11,17 @@
 #include
 #include

-#include <cxxabi.h>
-#define NEXA_CLASS_NAME (abi::__cxa_demangle(typeid(*this).name(), nullptr, nullptr, nullptr))
+// Replace the cxxabi.h include and NEXA_CLASS_NAME definition with cross-platform version
+#ifdef _MSC_VER
+    // Windows/MSVC version
+    #include <typeinfo>
+    #define NEXA_CLASS_NAME (typeid(*this).name())
+#else
+    // Unix/GCC/Clang version
+    #include <cxxabi.h>
+    #define NEXA_CLASS_NAME (abi::__cxa_demangle(typeid(*this).name(), nullptr, nullptr, nullptr))
+#endif
+
 #define NEXA_LOG(fmt, ...) fprintf(stderr, "%s::%s: " fmt "\n", NEXA_CLASS_NAME, __func__, ##__VA_ARGS__)

 // Prints the content of a ggml_tensor with specified precision. Can use the backend if available.
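
Note on PATCH 3: abi::__cxa_demangle comes from <cxxabi.h>, an Itanium-ABI support header that GCC and Clang ship but MSVC does not, and it is needed there because typeid(*this).name() returns a mangled name under GCC/Clang, while MSVC's typeid(...).name() is already human readable. A rough standalone sketch of the same idea follows; Widget, class_name and main are illustrative names only, not code from this tree, and the demangled string is leaked here for brevity.

    #include <cstdio>
    #include <typeinfo>
    #ifndef _MSC_VER
    #include <cxxabi.h>
    #endif

    struct Widget {
        const char *class_name() const {
    #ifdef _MSC_VER
            // MSVC already returns a readable name, e.g. "struct Widget".
            return typeid(*this).name();
    #else
            // GCC/Clang return a mangled name such as "6Widget"; demangle it.
            // __cxa_demangle allocates the result with malloc, so a real caller should free it.
            return abi::__cxa_demangle(typeid(*this).name(), nullptr, nullptr, nullptr);
    #endif
        }
    };

    int main() {
        Widget w;
        std::printf("%s\n", w.class_name());
        return 0;
    }
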
From d42e0371f84b413c25511328d75f079962c6fbbb Mon Sep 17 00:00:00 2001
From: Zack Zhiyuan Li
Date: Mon, 4 Nov 2024 22:50:33 +0000
Subject: [PATCH 4/5] remove C++20 style

---
 examples/nexa-omni-audio/omni.cpp | 21 ++++++++++-----------
 examples/qwen2-audio/qwen2.cpp    | 23 +++++++++++------------
 2 files changed, 21 insertions(+), 23 deletions(-)

diff --git a/examples/nexa-omni-audio/omni.cpp b/examples/nexa-omni-audio/omni.cpp
index d2701c2c1..0982a79df 100644
--- a/examples/nexa-omni-audio/omni.cpp
+++ b/examples/nexa-omni-audio/omni.cpp
@@ -565,17 +565,16 @@ bool omni_params_parse(int argc, char **argv, omni_params &params)

 static omni_params get_omni_params_from_context_params(omni_context_params &params)
 {
-    omni_params all_params = {
-        .gpt = {
-            .n_gpu_layers = params.n_gpu_layers,
-            .model = params.model,
-            .prompt = params.prompt,
-        },
-        .whisper = {
-            .model = params.mmproj,
-            .fname_inp = {params.file},
-        },
-    };
+    omni_params all_params;
+
+    // Initialize gpt params
+    all_params.gpt.n_gpu_layers = params.n_gpu_layers;
+    all_params.gpt.model = params.model;
+    all_params.gpt.prompt = params.prompt;
+
+    // Initialize whisper params
+    all_params.whisper.model = params.mmproj;
+    all_params.whisper.fname_inp = {params.file};

     if (all_params.gpt.n_threads <= 0)
     {
diff --git a/examples/qwen2-audio/qwen2.cpp b/examples/qwen2-audio/qwen2.cpp
index c1636139b..d14145835 100644
--- a/examples/qwen2-audio/qwen2.cpp
+++ b/examples/qwen2-audio/qwen2.cpp
@@ -565,18 +565,17 @@ bool omni_params_parse(int argc, char **argv, omni_params &params)

 static omni_params get_omni_params_from_context_params(omni_context_params &params)
 {
-    omni_params all_params = {
-        .gpt = {
-            .n_gpu_layers = params.n_gpu_layers,
-            .model = params.model,
-            .prompt = params.prompt,
-        },
-        .whisper = {
-            .model = params.mmproj,
-            .fname_inp = {params.file},
-        },
-    };
-
+    omni_params all_params;
+
+    // Initialize gpt params
+    all_params.gpt.n_gpu_layers = params.n_gpu_layers;
+    all_params.gpt.model = params.model;
+    all_params.gpt.prompt = params.prompt;
+
+    // Initialize whisper params
+    all_params.whisper.model = params.mmproj;
+    all_params.whisper.fname_inp = {params.file};
+
     if (all_params.gpt.n_threads <= 0)
     {
         all_params.gpt.n_threads = std::thread::hardware_concurrency();
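
Note on PATCH 4 (and PATCH 5 below): designated initializers such as .gpt = { ... } are a C++20 feature (GCC and Clang also accept them in earlier modes as an extension), so MSVC building these files in C++17 mode rejects them; both patches fall back to plain member-wise assignment. A minimal illustration of the two styles; the config struct is a made-up stand-in, not a type from this tree.

    #include <string>

    // Hypothetical stand-in for omni_params / omni_context_params.
    struct config {
        int n_gpu_layers = 0;
        std::string model;
    };

    int main() {
        // C++20 designated-initializer style, rejected by MSVC in C++17 mode:
        // config c = { .n_gpu_layers = -1, .model = "model.gguf" };

        // Portable member-wise assignment, the style the patches switch to:
        config c;
        c.n_gpu_layers = -1;
        c.model = "model.gguf";
        return c.n_gpu_layers == -1 ? 0 : 1;
    }
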
From 05853eb861d522cc51c450efbabdc1470118cf5b Mon Sep 17 00:00:00 2001
From: Zack Zhiyuan Li
Date: Mon, 4 Nov 2024 23:03:49 +0000
Subject: [PATCH 5/5] remove C++20 syntax

---
 examples/nexa-omni-audio/omni.cpp | 13 ++++++-------
 examples/qwen2-audio/qwen2.cpp    | 13 ++++++-------
 2 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/examples/nexa-omni-audio/omni.cpp b/examples/nexa-omni-audio/omni.cpp
index 0982a79df..f55dc3d5c 100644
--- a/examples/nexa-omni-audio/omni.cpp
+++ b/examples/nexa-omni-audio/omni.cpp
@@ -523,13 +523,12 @@ bool omni_context_params_parse(int argc, char **argv, omni_context_params &param

 omni_context_params omni_context_default_params()
 {
-    omni_context_params params = {
-        .model = "",
-        .mmproj = "",
-        .file = "",
-        .prompt = "this conversation talks about",
-        .n_gpu_layers = -1,
-    };
+    omni_context_params params;
+    params.model = "";
+    params.mmproj = "";
+    params.file = "";
+    params.prompt = "this conversation talks about";
+    params.n_gpu_layers = -1;

     return params;
 }
diff --git a/examples/qwen2-audio/qwen2.cpp b/examples/qwen2-audio/qwen2.cpp
index d14145835..be7d74d6d 100644
--- a/examples/qwen2-audio/qwen2.cpp
+++ b/examples/qwen2-audio/qwen2.cpp
@@ -523,13 +523,12 @@ bool omni_context_params_parse(int argc, char **argv, omni_context_params &param

 omni_context_params omni_context_default_params()
 {
-    omni_context_params params = {
-        .model = "",
-        .mmproj = "",
-        .file = "",
-        .prompt = "this conversation talks about",
-        .n_gpu_layers = -1,
-    };
+    omni_context_params params;
+    params.model = "";
+    params.mmproj = "";
+    params.file = "";
+    params.prompt = "this conversation talks about";
+    params.n_gpu_layers = -1;

     return params;
 }
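
Note on PATCH 1 and PATCH 2: the compiler error quoted in PATCH 1 comes from an existing _setmode/_O_BINARY use in whisper.cpp; only the declarations were missing when building for Windows, which is all the new #ifdef _WIN32 block supplies (_setmode lives in <io.h>, _O_BINARY in <fcntl.h>). The snippet below is a generic sketch of that standard CRT usage, presumably what the code around whisper.cpp:9935 does before reading raw audio bytes from stdin; it is not a copy of that code.

    #include <cstdio>
    #ifdef _WIN32
    #include <io.h>     // _setmode
    #include <fcntl.h>  // _O_BINARY
    #endif

    int main() {
    #ifdef _WIN32
        // Switch stdin to binary mode so CRLF translation cannot corrupt WAV bytes.
        _setmode(_fileno(stdin), _O_BINARY);
    #endif
        unsigned char header[4];
        std::size_t n = std::fread(header, 1, sizeof(header), stdin);
        std::printf("read %zu byte(s)\n", n);
        return 0;
    }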