use die("msg") instead of GGML_ASSERT(!"msg") or throw std::runtime_error("msg")
This commit is contained in:
parent
1d33ec5b1c
commit
1d09965179
2 changed files with 5 additions and 5 deletions
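Note: die() here is a small fatal-error helper from the training common code. A minimal sketch of the assumed behavior (print the message to stderr and exit with a failure code; the real definition in the repository may differ):

#include <stdio.h>
#include <stdlib.h>

// Assumed shape of the die() helper: report a fatal error and terminate
// with a non-zero exit code, instead of aborting like a failed assert
// or unwinding like a thrown exception.
static void die(const char * msg) {
    fprintf(stderr, "%s\n", msg);
    exit(1);
}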
@@ -109,7 +109,7 @@ struct ggml_tensor * randomize_tensor_normal(struct ggml_tensor * tensor, struct
             }
             break;
         default:
-            GGML_ASSERT(!"Unsupported tensor->n_dims");
+            die("Unsupported tensor->n_dims");
     };
     return tensor;
 }
@@ -153,7 +153,7 @@ struct ggml_tensor * randomize_tensor_uniform(struct ggml_tensor * tensor, struc
             }
             break;
         default:
-            GGML_ASSERT(!"Unsupported tensor->n_dims");
+            die("Unsupported tensor->n_dims");
     };
     return tensor;
 }
@@ -581,7 +581,7 @@ void load_opt_context_gguf(struct gguf_context * fctx, struct ggml_context * f_g
         copy_tensor_by_name(opt->lbfgs.lms, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_S);
         copy_tensor_by_name(opt->lbfgs.lmy, f_ggml_ctx, LLM_TENSOR_OPTIMIZER_LBFGS_MEMORY_Y);
     } else {
-        throw std::runtime_error("unknown optimizer type\n");
+        die("unknown optimizer type\n");
     }
 }

@@ -1664,12 +1664,12 @@ int main(int argc, char ** argv) {

         if (opt_param_count_changed) {
             print_lora_params(&lora.hparams);
-            GGML_ASSERT(!"Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting.");
+            die("Provided rank differs from checkpoint file. To use different rank start finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting.");
             // need to discard previous optimizer gradient statistics and opt_init with new shapes
             // TODO
         }
         if (opt_past_changed) {
-            GGML_ASSERT(!"Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting");
+            die("Optimizer parameter '--opt-past N' differs from checkpoint file. To use different value finetune from scratch with empty input checkpoint, e.g --checkpoint-in ''. Aborting");
             // need to discard previous optimizer past function value statistics and opt_init with new shapes
             // TODO
         }
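Why the old idiom carried a message: a string literal is a non-null pointer, so !"msg" is always false and the assertion always fires, with the message text visible in the stringified expression. Illustrated below with the standard assert macro standing in for GGML_ASSERT:

#include <assert.h>

int main(void) {
    // !"..." evaluates to 0, so this assertion always fails; the message
    // appears in the diagnostic because the whole expression is printed.
    // Failure calls abort(), unlike die()'s exit(1).
    assert(!"Unsupported tensor->n_dims");
    return 0;
}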