From 4bd55ec02c4039a2ec7eec36ce8869c239289058 Mon Sep 17 00:00:00 2001
From: ngxson
Date: Tue, 23 Jul 2024 20:48:44 +0200
Subject: [PATCH] better check

---
 examples/export-lora/export-lora.cpp | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/examples/export-lora/export-lora.cpp b/examples/export-lora/export-lora.cpp
index 186b3625b..7bf06463a 100644
--- a/examples/export-lora/export-lora.cpp
+++ b/examples/export-lora/export-lora.cpp
@@ -197,12 +197,15 @@ struct lora_merge_ctx {
 
         // check if all lora adapters have the same tensors
         // TODO: remove this when we can support merging subset of adapters. Ref: https://github.com/ggerganov/llama.cpp/pull/8607#discussion_r1686027777
+        static const char * err_no_subset_adapter = "Input adapters do not have the same list of tensors. This is not yet supported. Please merge the adapter one-by-one instead of merging all at once.";
         if (adapters.size() > 1) {
-            auto & base_adapter = adapters[0];
             for (size_t i = 1; i < adapters.size(); ++i) {
-                for (auto & it : base_adapter->tensors) {
-                    if (base_adapter->get_tensor(it.first) == nullptr) {
-                        throw std::runtime_error("Input adapters do not have the same list of tensors. This is not yet supported. Please merge the adapter one-by-one instead of merging all at once.");
+                if (adapters[0]->tensors.size() != adapters[i]->tensors.size()) {
+                    throw std::runtime_error(err_no_subset_adapter);
+                }
+                for (auto & it : adapters[0]->tensors) {
+                    if (adapters[i]->get_tensor(it.first) == nullptr) {
+                        throw std::runtime_error(err_no_subset_adapter);
                     }
                 }
             }