From c359c843fdfff860758aaa7ae98434e5fcba6c76 Mon Sep 17 00:00:00 2001 From: Brian Date: Wed, 10 Jan 2024 10:37:44 +1100 Subject: [PATCH] Update llama.cpp model param log: remove unneeded comments and convert from > to >= --- llama.cpp | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/llama.cpp b/llama.cpp index ddf44bf52..e211d5d45 100644 --- a/llama.cpp +++ b/llama.cpp @@ -3145,17 +3145,13 @@ static void llm_load_print_meta(llama_model_loader & ml, llama_model & model) { LLAMA_LOG_INFO("%s: rope_finetuned = %s\n", __func__, hparams.rope_finetuned ? "yes" : "unknown"); LLAMA_LOG_INFO("%s: model type = %s\n", __func__, llama_model_type_name(model.type)); LLAMA_LOG_INFO("%s: model ftype = %s\n", __func__, llama_model_ftype_name(model.ftype).c_str()); - if (ml.n_elements > 1e12) { - // Trillions Of Parameters + if (ml.n_elements >= 1e12) { LLAMA_LOG_INFO("%s: model params = %.2f T\n", __func__, ml.n_elements*1e-12); - } else if (ml.n_elements > 1e9) { - // Billions Of Parameters + } else if (ml.n_elements >= 1e9) { LLAMA_LOG_INFO("%s: model params = %.2f B\n", __func__, ml.n_elements*1e-9); - } else if (ml.n_elements > 1e6) { - // Millions Of Parameters + } else if (ml.n_elements >= 1e6) { LLAMA_LOG_INFO("%s: model params = %.2f M\n", __func__, ml.n_elements*1e-6); } else { - // Thousands Of Parameters LLAMA_LOG_INFO("%s: model params = %.2f K\n", __func__, ml.n_elements*1e-3); } if (ml.n_bytes < GiB) {