refactoring: set the default qnn lib search path in CMakeLists.txt via GGML_QNN_DEFAULT_LIB_SEARCH_PATH

This commit is contained in:
hongruichen 2024-07-29 15:51:54 +08:00
parent 5ecbeb5842
commit 6da82947df
5 changed files with 28 additions and 40 deletions

View file

@ -20,13 +20,11 @@ enum QNNBackend {
/**
*
* @param device 0: QNN_BACKEND_CPU 1: QNN_BACKEND_GPU 2:
* QNN_BACKEND_NPU
* @param qnn_lib_path qnn library path, such as "/data/local/tmp/" on
* Android or specified in JNI layer
* @param device 0: QNN_BACKEND_CPU 1: QNN_BACKEND_GPU 2:QNN_BACKEND_NPU
* @param extend_lib_search_path extended lib search path for searching QNN backend dynamic libs
* @return
*/
GGML_API ggml_backend_t ggml_backend_qnn_init(size_t dev_num, const char *qnn_lib_path);
GGML_API ggml_backend_t ggml_backend_qnn_init(size_t dev_num, const char *extend_lib_search_path);
GGML_API bool ggml_backend_is_qnn(ggml_backend_t backend);

View file

@ -889,10 +889,12 @@ if (GGML_QNN)
find_library(LOG_LIB log)
find_library(ANDROID_LIB android)
set(GGML_EXTRA_LIBS ${GGML_EXTRA_LIBS} ${LOG_LIB} ${ANDROID_LIB})
set(GGML_QNN_DEFAULT_LIB_SEARCH_PATH "\"/data/local/tmp/\"")
else()
message(FATAL_ERROR "QNN now only available on Android")
endif()
add_compile_definitions(GGML_QNN_DEFAULT_LIB_SEARCH_PATH=${GGML_QNN_DEFAULT_LIB_SEARCH_PATH})
if (NOT DEFINED GGML_QNN_SDK_PATH)
# try read from environment variable
if (DEFINED ENV{QNN_SDK_PATH})

View file

@ -319,15 +319,8 @@ static ggml_guid_t ggml_backend_qnn_guid() {
return &guid;
}
static ggml_backend_t ggml_backend_qnn_reg_init(const char *params, void *user_data) {
if (nullptr == params) {
// QNN library path
// can be hardcoded to "/data/local/tmp/" for Android command line application
// or specified in JNI layer for Android APK
params = "/data/local/tmp/";
}
ggml_backend_t qnn_backend = ggml_backend_qnn_init((int)(intptr_t)user_data, params);
static ggml_backend_t ggml_backend_qnn_reg_init(const char *extend_lib_search_path, void *user_data) {
ggml_backend_t qnn_backend = ggml_backend_qnn_init((int)(intptr_t)user_data, extend_lib_search_path);
return qnn_backend;
}
@ -390,28 +383,25 @@ ggml_backend_buffer_type_t ggml_backend_qnn_buffer_type(size_t device) {
return &ggml_backend_qnn_buffer_types[device];
}
/**
*
* @param device 0: QNN_BACKEND_CPU 1: QNN_BACKEND_GPU 2: QNN_BACKEND_NPU
* @param qnn_lib_path qnn library path, such as "/data/local/tmp/" on Android or specified in JNI layer
* @return
*/
ggml_backend_t ggml_backend_qnn_init(size_t device, const char *qnn_lib_path) {
ggml_backend_t ggml_backend_qnn_init(size_t device, const char *extend_lib_search_path) {
int result = 0;
if (nullptr == qnn_lib_path) {
QNN_LOG_ERROR("invalid qnn lib path\n");
return nullptr;
if (!extend_lib_search_path) {
extend_lib_search_path = GGML_QNN_DEFAULT_LIB_SEARCH_PATH;
QNN_LOG_WARN("extend_lib_search_path is nullptr, will use " GGML_QNN_DEFAULT_LIB_SEARCH_PATH " as default");
}
QNN_LOG_DEBUG("device %d", device);
QNN_LOG_DEBUG("qnn_lib_path %s", qnn_lib_path);
QNN_LOG_DEBUG("extend_lib_search_path %s", extend_lib_search_path);
if (device >= GGML_QNN_MAX_DEVICES) {
QNN_LOG_ERROR("invalid device %d", device);
return nullptr;
}
std::string path = qnn_lib_path;
std::string path = extend_lib_search_path;
// TODO: Fix this for other platforms
#if defined(__ANDROID__) || defined(ANDROID)
if (QNN_BACKEND_NPU == device) {
if (0 == setenv("LD_LIBRARY_PATH",
(path + ":/vendor/dsp/cdsp:/vendor/lib64:/vendor/dsp/"
@ -438,8 +428,9 @@ ggml_backend_t ggml_backend_qnn_init(size_t device, const char *qnn_lib_path) {
QNN_LOG_ERROR("%s backend setenv failure\n", qnn::get_backend_name(device));
}
}
#endif
auto instance = std::make_shared<qnn::qnn_instance>(qnn_lib_path, g_qnn_mgr[device].lib, "");
auto instance = std::make_shared<qnn::qnn_instance>(extend_lib_search_path, g_qnn_mgr[device].lib, "");
result = instance->qnn_init(nullptr);
if (result != 0) {
QNN_LOG_WARN("init qnn subsystem failed with qnn backend %s, pls check why\n", qnn::get_backend_name(device));

View file

@ -5,7 +5,7 @@
#include <mutex>
#if (defined __ANDROID__) || (defined ANDROID)
#if defined(__ANDROID__) || defined(ANDROID)
#include <android/log.h>
#endif
@ -22,7 +22,7 @@ void qnn::internal_log(ggml_log_level level, const char * /*file*/, const char *
int len_prefix = snprintf(s_qnn_internal_log_buf, QNN_LOGBUF_LEN, "[%s, %d]: ", func, line);
int len = vsnprintf(s_qnn_internal_log_buf + len_prefix, QNN_LOGBUF_LEN - len_prefix, format, args);
if (len < (QNN_LOGBUF_LEN - len_prefix)) {
#if (defined __ANDROID__) || (defined ANDROID)
#if defined(__ANDROID__) || defined(ANDROID)
// for Android APK
__android_log_print(level, "ggml-qnn", "%s\n", s_qnn_internal_log_buf);
#endif

View file

@ -16706,18 +16706,15 @@ struct llama_context * llama_new_context_with_model(
}
}
#elif defined(GGML_USE_QNN)
if (model->n_gpu_layers > 0) {
//the second param is the data path of prebuilt QNN libs provided by Qualcomm
//can be hardcoded to "/data/local/tmp/" for Android command line application
//or specified in JNI layer for Android APK application
ggml_backend_t backend = ggml_backend_qnn_init(model->main_gpu, "/data/local/tmp/");
if (nullptr == backend) {
LLAMA_LOG_ERROR("%s: failed to initialize QNN backend\n", __func__);
llama_free(ctx);
return nullptr;
}
ctx->backends.push_back(backend);
if (model->n_gpu_layers > 0) {
ggml_backend_t backend = ggml_backend_qnn_init(model->main_gpu, nullptr);
if (nullptr == backend) {
LLAMA_LOG_ERROR("%s: failed to initialize QNN backend\n", __func__);
llama_free(ctx);
return nullptr;
}
ctx->backends.push_back(backend);
}
#endif
#ifdef GGML_USE_BLAS