From b1657cb934d95ee28c3bd5667e48d46ce1c7da91 Mon Sep 17 00:00:00 2001
From: Xuan Son Nguyen
Date: Thu, 5 Sep 2024 20:58:10 +0200
Subject: [PATCH] bring back missing --alias

---
 common/common.cpp | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/common/common.cpp b/common/common.cpp
index 534cbe35c..9e959b02b 100644
--- a/common/common.cpp
+++ b/common/common.cpp
@@ -650,7 +650,6 @@ std::vector<llama_arg> gpt_params_parser_init(gpt_params & params, llama_example
         sampler_type_names += llama_sampling_type_to_str(sampler_type) + ";";
     }
     sampler_type_names.pop_back();
-    const char split_delim = ',';
 
 
     /**
@@ -1804,6 +1803,13 @@ std::vector<llama_arg> gpt_params_parser_init(gpt_params & params, llama_example
             params.control_vector_layer_end = std::stoi(end);
         }
     ));
+    add_opt(llama_arg(
+        {"-a", "--alias"}, "STRING",
+        "set alias for model name (to be used by REST API)",
+        [&params](std::string value) {
+            params.model_alias = value;
+        }
+    ).set_examples({LLAMA_EXAMPLE_SERVER}).set_env("LLAMA_ARG_ALIAS"));
     add_opt(llama_arg(
         {"-m", "--model"}, "FNAME",
         ex == LLAMA_EXAMPLE_EXPORT_LORA
@@ -1950,7 +1956,7 @@ std::vector<llama_arg> gpt_params_parser_init(gpt_params & params, llama_example
         {"-npp"}, "n0,n1,...",
         "number of prompt tokens",
         [&params](std::string value) {
-            auto p = string_split<int>(value, split_delim);
+            auto p = string_split<int>(value, ',');
             params.n_pp.insert(params.n_pp.end(), p.begin(), p.end());
         }
     ).set_examples({LLAMA_EXAMPLE_BENCH}));
@@ -1958,7 +1964,7 @@ std::vector<llama_arg> gpt_params_parser_init(gpt_params & params, llama_example
         {"-ntg"}, "n0,n1,...",
         "number of text generation tokens",
         [&params](std::string value) {
-            auto p = string_split<int>(value, split_delim);
+            auto p = string_split<int>(value, ',');
             params.n_tg.insert(params.n_tg.end(), p.begin(), p.end());
         }
     ).set_examples({LLAMA_EXAMPLE_BENCH}));
@@ -1966,7 +1972,7 @@ std::vector<llama_arg> gpt_params_parser_init(gpt_params & params, llama_example
         {"-npl"}, "n0,n1,...",
         "number of parallel prompts",
         [&params](std::string value) {
-            auto p = string_split<int>(value, split_delim);
+            auto p = string_split<int>(value, ',');
             params.n_pl.insert(params.n_pl.end(), p.begin(), p.end());
         }
     ).set_examples({LLAMA_EXAMPLE_BENCH}));