From 333820d7491cd31c707a340ff23b984a84e40154 Mon Sep 17 00:00:00 2001
From: magicse
Date: Fri, 7 Feb 2025 15:48:47 +0200
Subject: [PATCH] llama : fix progress dots (#11730)

* Update llama.cpp

Display progress dots in the terminal. Without this change, the progress
dots were not shown while loading the model from file.

* Update llama.cpp

Removed trailing spaces.
---
 src/llama.cpp | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/llama.cpp b/src/llama.cpp
index 3d5a928a8..c3da3c43b 100644
--- a/src/llama.cpp
+++ b/src/llama.cpp
@@ -9428,7 +9428,6 @@ static struct llama_model * llama_model_load_from_file_impl(
         struct llama_model_params params) {
     ggml_time_init();
 
-    llama_model * model = new llama_model(params);
 
     unsigned cur_percentage = 0;
     if (params.progress_callback == NULL) {
@@ -9447,6 +9446,8 @@ static struct llama_model * llama_model_load_from_file_impl(
         };
     }
 
+    llama_model * model = new llama_model(params);
+
     // create list of devices to use with this model
     if (params.devices) {
         for (ggml_backend_dev_t * dev = params.devices; *dev; ++dev) {
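
Context for the reordering, as a hedged sketch: the default dot-printing callback is installed into the local `params` only when `params.progress_callback == NULL`, and the model object copies `params` when it is constructed. Constructing the model before that assignment therefore leaves the model's copy without the default callback, which is presumably why no dots appeared during loading. The sketch below uses simplified, hypothetical names (`model_params`, `model`, `load_from_file`, plain `fputc` instead of the library's logging macros) to illustrate the ordering issue; it is not the actual llama.cpp implementation.

    #include <cstdio>

    // Hypothetical, simplified stand-ins for llama_model_params / llama_model.
    struct model_params {
        bool (*progress_callback)(float progress, void * user_data) = nullptr;
        void * progress_callback_user_data = nullptr;
    };

    struct model {
        model_params params;   // copy of the params taken at construction time
        explicit model(const model_params & p) : params(p) {}
    };

    static model * load_from_file(model_params params) {
        unsigned cur_percentage = 0;

        // Install a default dot-printing callback when the caller set none.
        if (params.progress_callback == nullptr) {
            params.progress_callback_user_data = &cur_percentage;
            params.progress_callback = [](float progress, void * ctx) {
                unsigned * cur = (unsigned *) ctx;
                unsigned percentage = (unsigned) (100 * progress);
                while (percentage > *cur) {
                    (*cur)++;
                    std::fputc('.', stderr);      // the progress dots
                    if (*cur >= 100) {
                        std::fputc('\n', stderr);
                    }
                }
                return true;
            };
        }

        // Constructing the model only now (the reordering the patch performs)
        // means its copied params carry the default callback.
        model * m = new model(params);

        // Simulated loading loop: progress is reported through the model's copy.
        for (int i = 1; i <= 10; ++i) {
            m->params.progress_callback(i / 10.0f, m->params.progress_callback_user_data);
        }
        return m;
    }

    int main() {
        model_params params;   // caller sets no callback -> default dots are used
        delete load_from_file(params);
        return 0;
    }

If the model were constructed before the `if` block, as in the pre-patch code, its copied `progress_callback` would still be null and the simulated loading loop would print nothing, mirroring the missing dots the patch fixes.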