use die("msg") instead of GGML_ASSERT(!"msg") or throw std::runtime_error("msg")

This commit is contained in:
xaedes 2023-09-16 21:12:16 +02:00
parent 1d33ec5b1c
commit 1d09965179
No known key found for this signature in database
GPG key ID: 30030EDD817EA2B1
2 changed files with 5 additions and 5 deletions

View file

@@ -109,7 +109,7 @@ struct ggml_tensor * randomize_tensor_normal(struct ggml_tensor * tensor, struct
} }
break; break;
default: default:
GGML_ASSERT(!"Unsupported tensor->n_dims"); die("Unsupported tensor->n_dims");
}; };
return tensor; return tensor;
} }
@@ -153,7 +153,7 @@ struct ggml_tensor * randomize_tensor_uniform(struct ggml_tensor * tensor, struc
} }
break; break;
default: default:
GGML_ASSERT(!"Unsupported tensor->n_dims"); die("Unsupported tensor->n_dims");
}; };
return tensor; return tensor;
} }
@@ -581,7 +581,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g
copy_tensor_by_name(opt->lbfgs.lms, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_S); copy_tensor_by_name(opt->lbfgs.lms, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_S);
copy_tensor_by_name(opt->lbfgs.lmy, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_Y); copy_tensor_by_name(opt->lbfgs.lmy, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_Y);
} else { } else {
throw std::runtime_error("unknown optimizer type\n"); die("unknown optimizer type\n");
} }
} }

View file

@@ -1664,12 +1664,12 @@ int main(int argc, char ** argv) {
if (opt_param_count_changed) { if (opt_param_count_changed) {
print_lora_params(&lora.hparams); print_lora_params(&lora.hparams);
GGML_ASSERT(!"Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting."); die("Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting.");
// need to discard previous optimizer gradient statistics and opt_init with new shapes // need to discard previous optimizer gradient statistics and opt_init with new shapes
// TODO // TODO
} }
if (opt_past_changed) { if (opt_past_changed) {
GGML_ASSERT(!"Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting"); die("Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting");
// need to discard previous optimizer past function value statistics and opt_init with new shapes // need to discard previous optimizer past function value statistics and opt_init with new shapes
// TODO // TODO
} }