diff --git a/common/train.cpp b/common/train.cpp
index 99c319253..287fe5d93 100644
--- a/common/train.cpp
+++ b/common/train.cpp
@@ -1115,7 +1115,11 @@ void print_common_train_usage(int /*argc*/, char ** argv, const struct train_par
 
 bool consume_common_train_arg(int argc, char ** argv, int * idx, struct train_params_common * params, bool * invalid_param) {
     int& i = *idx;
-    char * arg = argv[i];
+    std::string arg = argv[i];
+    const std::string arg_prefix = "--";
+    if (arg.compare(0, arg_prefix.size(), arg_prefix) == 0) {
+        std::replace(arg.begin(), arg.end(), '_', '-');
+    }
     if (arg == "--train-data") {
         if (++i >= argc) {
             *invalid_param = true;
diff --git a/examples/finetune/finetune.cpp b/examples/finetune/finetune.cpp
index 308e3d592..17b559a10 100644
--- a/examples/finetune/finetune.cpp
+++ b/examples/finetune/finetune.cpp
@@ -514,8 +514,6 @@ static void init_lora(const struct my_llama_model * model, struct my_llama_lora
     ggml_allocr_free(alloc);
 }
 
-
-
 static void randomize_lora(struct my_llama_lora * lora, int seed, float mean, float std, float min, float max) {
     const uint32_t n_layer = lora->layers.size();
 
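
For reference, the first hunk's behavior can be sketched as a standalone helper. This is only an illustration of the technique, not code from the patch: normalize_arg is a hypothetical name, and the patch inlines the same logic directly in consume_common_train_arg so that long options typed with underscores (e.g. "--train_data") match their dashed spellings (e.g. "--train-data").

// Standalone sketch (assumption: not part of the patch) of the
// underscore-to-dash normalization applied to long options.
#include <algorithm>
#include <iostream>
#include <string>

static std::string normalize_arg(std::string arg) {
    const std::string arg_prefix = "--";
    // Only rewrite long options; plain argument values keep their underscores.
    if (arg.compare(0, arg_prefix.size(), arg_prefix) == 0) {
        std::replace(arg.begin(), arg.end(), '_', '-');
    }
    return arg;
}

int main() {
    std::cout << normalize_arg("--train_data")   << "\n"; // prints "--train-data"
    std::cout << normalize_arg("train_data.txt") << "\n"; // unchanged
    return 0;
}

Switching arg from char * to std::string is what makes the subsequent comparisons like arg == "--train-data" compare contents rather than pointers after the in-place rewrite.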