From 360c365eb6b6142f7226ce0bcd6c1c969ce97537 Mon Sep 17 00:00:00 2001 From: FNsi <125447286+FNsi@users.noreply.github.com> Date: Sat, 20 May 2023 08:27:37 +0800 Subject: [PATCH] Update merge.py --- merge.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/merge.py b/merge.py index 3f0670730..56418e65a 100644 --- a/merge.py +++ b/merge.py @@ -4,8 +4,8 @@ import json import torch import argparse import transformers -from transformers import LlamaTokenizer, LlamaConfig, LlamaForCausalLM -from peft import PeftModel, LoraConfig, LoraModel +from transformers import LlamaTokenizer, LlamaForCausalLM +from peft import PeftModel # args parser = argparse.ArgumentParser() @@ -45,9 +45,7 @@ model = PeftModel.from_pretrained( print(f">>> merging lora...") -#Why 'LlamaForCausalLM' object has no attribute 'merge_and_unload' ???????? -#okay, it works, i don't know why it didn't. - +# Merge the LoRA weights into the base model via PEFT's merge_and_unload() model = model.merge_and_unload() model.save_pretrained(args.out_path)