Change the default finetune params lora_r and lora_alpha to match the n_rank parameters' default of 4
This commit is contained in:
parent
7a63d429af
commit
63cb374a99
1 changed file with 2 additions and 2 deletions
|
@ -2162,8 +2162,8 @@ struct train_params get_default_train_params() {
|
|||
params.n_examples = 1;
|
||||
params.n_predict = 1024;
|
||||
|
||||
params.lora_alpha = 100;
|
||||
params.lora_r = 100;
|
||||
params.lora_alpha = 4;
|
||||
params.lora_r = 4;
|
||||
|
||||
params.n_rank_attention_norm = 1;
|
||||
params.n_rank_wq = 4;
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue