llama : fix progress dots (#11730)

* Update llama.cpp

Display progress dots in the terminal.
Without this change, no progress dots were shown while loading the model from a file.

* Update llama.cpp

removed trailing spaces

commit 333820d749
parent c026ba3c23
Author: magicse
Date:   2025-02-07 15:48:47 +02:00 (committed by GitHub)

@@ -9428,7 +9428,6 @@ static struct llama_model * llama_model_load_from_file_impl(
         struct llama_model_params params) {
     ggml_time_init();
 
-    llama_model * model = new llama_model(params);
 
     unsigned cur_percentage = 0;
     if (params.progress_callback == NULL) {
@@ -9447,6 +9446,8 @@ static struct llama_model * llama_model_load_from_file_impl(
         };
     }
 
+    llama_model * model = new llama_model(params);
+
     // create list of devices to use with this model
     if (params.devices) {
         for (ggml_backend_dev_t * dev = params.devices; *dev; ++dev) {
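
For context on why the move helps: the default dot-printing callback is installed only when the caller leaves params.progress_callback unset, and the model object takes its own copy of params in its constructor, which the loader then reads the callback from. Allocating the model before the callback was assigned left that copy with a null callback, so no dots appeared. Below is a minimal sketch of driving the same kind of progress reporting explicitly from user code through the public C API; it assumes the llama.h names in use around this commit (llama_model_default_params, llama_model_load_from_file, llama_model_free), and the print_dots helper and "model.gguf" path are illustrative placeholders, not part of the commit.

    // Sketch: dot-per-percent progress callback supplied by the caller.
    #include <cstdio>
    #include "llama.h"

    // Invoked repeatedly during loading with progress in [0, 1];
    // returning false would cancel the load.
    static bool print_dots(float progress, void * user_data) {
        unsigned * last_pct = (unsigned *) user_data;
        unsigned pct = (unsigned) (100 * progress);
        while (pct > *last_pct) {   // one dot per percentage point gained
            ++*last_pct;
            fputc('.', stderr);
        }
        if (pct >= 100) {
            fputc('\n', stderr);
        }
        return true;
    }

    int main() {
        unsigned last_pct = 0;

        llama_model_params mparams = llama_model_default_params();
        mparams.progress_callback           = print_dots;
        mparams.progress_callback_user_data = &last_pct;

        // placeholder model path
        llama_model * model = llama_model_load_from_file("model.gguf", mparams);
        if (model == NULL) {
            return 1;
        }

        llama_model_free(model);
        return 0;
    }

With the commit applied, the loader's own default callback prints the same dots whenever no callback is supplied, because the model is now constructed after that default has been put in place.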