ggml : fix rope + llama minor optimizations (#3560)

* Minor fixes and fixed a memleak

* Use const auto references in range-based for loops (C++17)
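
One of the loop changes replaces by-value element copies with const references in range-based for loops. A minimal standalone sketch of the pattern (the vector of strings below is illustrative only, not code from this commit):

#include <string>
#include <vector>

int main() {
    std::vector<std::string> names = {"ggml", "llama", "rope"};

    // By value: each iteration copies the std::string.
    for (auto name : names) {
        (void)name;
    }

    // By const reference: binds to the stored element, no copy is made.
    for (const auto & name : names) {
        (void)name;
    }

    return 0;
}

For containers of cheap-to-copy scalars the copy is harmless; the reference form pays off for strings, vectors, and other heap-owning element types.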
Author: Herman Semenov, 2023-10-20 10:02:12 +00:00, committed by GitHub
parent e78f3ef24a
commit f439e506e8
4 changed files with 7 additions and 7 deletions


@@ -1425,7 +1425,7 @@ void train_opt_callback(void * vdata, int accum_step, float * sched, bool * cancel)
     int impr_plot = -(int)(1 + (opt->loss_before - opt->loss_after) * 10.0f + 0.5f);
     if (impr_plot > 0) impr_plot = 0;
-    if (std::isnan(opt->loss_before) || std::isnan(opt->loss_before)) impr_plot = 0;
+    if (std::isnan(opt->loss_before) || std::isnan(opt->loss_after)) impr_plot = 0;
     printf("%s: iter=%6d sample=%zu/%zu sched=%f loss=%f",
         __func__, opt->iter, std::min(1+train->shuffle_next_sample, train->shuffle_sample_count), train->shuffle_sample_count,
         *sched, opt->loss_after);
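
The changed line above is the actual fix: the old guard tested loss_before twice, so a NaN in loss_after never zeroed the improvement plot. A standalone sketch of the corrected check (the opt_state struct here is a stand-in for the real optimizer context, not a type from the codebase):

#include <cmath>
#include <cstdio>

struct opt_state {
    float loss_before;
    float loss_after;
};

int main() {
    opt_state opt = { 1.0f, NAN };   // loss_after diverged to NaN
    int impr_plot = -42;             // pretend improvement bar width

    // Corrected guard: zero the plot if either loss value is NaN.
    if (std::isnan(opt.loss_before) || std::isnan(opt.loss_after)) {
        impr_plot = 0;
    }

    printf("impr_plot = %d\n", impr_plot);   // prints 0
    return 0;
}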