lora : improve compat with mergekit-extract-lora (#11131)

* (wip) support mergekit-extracted lora

* support mergekit-extract-lora

* use lora->get_scale

* correct comment

* correct norm name & condition

* add some hints
Xuan Son Nguyen 2025-01-08 15:59:53 +01:00 committed by GitHub
parent c07d437bbd
commit 4d2b3d8804
4 changed files with 74 additions and 12 deletions


@@ -45,6 +45,13 @@ struct llama_lora_weight {
    struct ggml_tensor * a = nullptr;
    struct ggml_tensor * b = nullptr;

    // get actual scale based on rank and alpha
    float get_scale(float alpha, float adapter_scale) {
        const float rank  = (float) b->ne[0];
        const float scale = alpha ? adapter_scale * alpha / rank : adapter_scale;
        return scale;
    }

    llama_lora_weight() = default;
    llama_lora_weight(struct ggml_tensor * a, struct ggml_tensor * b) : a(a), b(b) {}
};
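
The hunk above boils down to: scale = adapter_scale * alpha / rank when alpha is non-zero, and plain adapter_scale otherwise (an adapter with no alpha recorded falls back to the user-supplied scale). The following is a minimal, standalone C++ sketch of that arithmetic only; it is not the llama.cpp API, the rank and alpha values are hypothetical, and the rank is passed in explicitly instead of being read from tensor B's dimensions as get_scale does.

    #include <cstdio>

    // same arithmetic as llama_lora_weight::get_scale above, with the rank
    // passed in explicitly instead of taken from b->ne[0] (illustration only)
    static float get_scale(float rank, float alpha, float adapter_scale) {
        return alpha ? adapter_scale * alpha / rank : adapter_scale;
    }

    int main() {
        // hypothetical adapter with rank 16 and alpha 32: scale = 1.0 * 32 / 16 = 2.0
        std::printf("scale = %.2f\n", get_scale(16.0f, 32.0f, 1.0f));
        // adapter without an alpha (alpha == 0): scale falls back to adapter_scale
        std::printf("scale = %.2f\n", get_scale(16.0f, 0.0f, 1.0f));
        return 0;
    }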