From 16f45c4deceadb1f18283fb0e384349f1f2cf389 Mon Sep 17 00:00:00 2001
From: l3utterfly
Date: Wed, 4 Oct 2023 18:01:50 +0800
Subject: [PATCH] updated implementation for hparam comparison to handle inf
 and NaN

---
 llama.cpp | 63 +++++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 45 insertions(+), 18 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index 5205be576..e5a72a2ba 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -123,6 +123,28 @@ static void replace_all(std::string & s, const std::string & search, const std::
     }
     s = std::move(result);
 }
+
+// Approximate float equality: true when a == b exactly (which also covers two
+// matching infinities) or when |a - b| <= abs_tol. A lone or mismatched
+// infinity never matches; NaN equals nothing, NaN included (IEEE-754 style).
+// Throws std::invalid_argument if abs_tol is negative.
+static bool is_float_eq(float a, float b, float abs_tol) {
+    if (abs_tol < 0.0f) {
+        throw std::invalid_argument("Tolerance must be non-negative");
+    }
+
+    if (a == b) { // exact equality, incl. matching infinities
+        return true;
+    }
+
+    if (std::isinf(a) || std::isinf(b)) { // a mismatched infinity is never "close"
+        return false;
+    }
+
+    const float diff = std::fabs(b - a);
+    return diff <= abs_tol;
+}
+
 #ifdef GGML_USE_CPU_HBM
 #include <hbwmalloc.h>
 #endif
@@ -930,8 +952,6 @@ static const size_t kB = 1024;
 static const size_t MB = kB*kB;
 static const size_t GB = kB*kB*kB;
 
-const double EPSILON = 1e-9;
-
 struct llama_hparams {
     bool vocab_only;
     uint32_t n_vocab;
@@ -949,23 +969,30 @@ struct llama_hparams {
     float rope_freq_base_train;
     float rope_freq_scale_train;
 
-    bool operator!=(const llama_hparams & other) const {
-        if(this->vocab_only != other.vocab_only) return true;
-        if(this->n_vocab != other.n_vocab) return true;
-        if(this->n_ctx_train != other.n_ctx_train) return true;
-        if(this->n_embd != other.n_embd) return true;
-        if(this->n_head != other.n_head) return true;
-        if(this->n_head_kv != other.n_head_kv) return true;
-        if(this->n_layer != other.n_layer) return true;
-        if(this->n_rot != other.n_rot) return true;
-        if(this->n_ff != other.n_ff) return true;
-
-        if(std::abs(this->f_norm_eps - other.f_norm_eps) > EPSILON) return true;
-        if(std::abs(this->f_norm_rms_eps - other.f_norm_rms_eps) > EPSILON) return true;
-        if(std::abs(this->rope_freq_base_train - other.rope_freq_base_train) > EPSILON) return true;
-        if(std::abs(this->rope_freq_scale_train - other.rope_freq_scale_train) > EPSILON) return true;
+    bool operator==(const llama_hparams & other) const {
+        if (this->vocab_only != other.vocab_only) return false;
+        if (this->n_vocab != other.n_vocab) return false;
+        if (this->n_ctx_train != other.n_ctx_train) return false;
+        if (this->n_embd != other.n_embd) return false;
+        if (this->n_head != other.n_head) return false;
+        if (this->n_head_kv != other.n_head_kv) return false;
+        if (this->n_layer != other.n_layer) return false;
+        if (this->n_rot != other.n_rot) return false;
+        if (this->n_ff != other.n_ff) return false;
-        return false;
+        const float EPSILON = 1e-9f; // NOTE(review): absolute tolerance; below float resolution for large values (e.g. rope_freq_base_train ~1e4), so those effectively compare exactly -- confirm intended
+
+        if (!is_float_eq(this->f_norm_eps, other.f_norm_eps, EPSILON)) return false;
+        if (!is_float_eq(this->f_norm_rms_eps, other.f_norm_rms_eps, EPSILON)) return false;
+        if (!is_float_eq(this->rope_freq_base_train, other.rope_freq_base_train, EPSILON)) return false;
+        if (!is_float_eq(this->rope_freq_scale_train, other.rope_freq_scale_train, EPSILON)) return false;
+
+        return true;
+    }
+
+    // != is defined via == so the two comparisons can never disagree
+    bool operator!=(const llama_hparams & other) const {
+        return !(*this == other);
     }
 
     uint32_t n_gqa() const {