From 1d099651792713da8a0e3aaf0ca2594dfcc5b40d Mon Sep 17 00:00:00 2001 From: xaedes Date: Sat, 16 Sep 2023 21:12:16 +0200 Subject: [PATCH] use die("msg") instead of replace GGML_ASSERT(!"msg") or throw std::runtime_error("msg") --- common/train.cpp | 6 +++--- examples/finetune/finetune.cpp | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/common/train.cpp b/common/train.cpp index f38c25bd5..1eec3e3fb 100644 --- a/common/train.cpp +++ b/common/train.cpp @@ -109,7 +109,7 @@ struct ggml_tensor * randomize_tensor_normal(struct ggml_tensor * tensor, struct } break; default: - GGML_ASSERT(!"Unsupported tensor->n_dims"); + die("Unsupported tensor->n_dims"); }; return tensor; } @@ -153,7 +153,7 @@ struct ggml_tensor * randomize_tensor_uniform(struct ggml_tensor * tensor, struc } break; default: - GGML_ASSERT(!"Unsupported tensor->n_dims"); + die("Unsupported tensor->n_dims"); }; return tensor; } @@ -581,7 +581,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g copy_tensor_by_name(opt->lbfgs.lms, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_S); copy_tensor_by_name(opt->lbfgs.lmy, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_Y); } else { - throw std::runtime_error("unknown optimizer type\n"); + die("unknown optimizer type\n"); } } diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp index c1227897c..548075493 100644 --- a/examples/finetune/finetune.cpp +++ b/examples/finetune/finetune.cpp @@ -1664,12 +1664,12 @@ int main(int argc, char ** argv) { if (opt_param_count_changed) { print_lora_params(&lora.hparams); - GGML_ASSERT(!"Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting."); + die("Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting."); // need to discard previous optimizer gradient statistics and opt_init with new shapes // TODO } if (opt_past_changed) { - GGML_ASSERT(!"Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting"); + die("Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting"); // need to discard previous optimizer past function value statistics and opt_init with new shapes // TODO }