# Merging Model Weights
from transformers import LlamaForCausalLM, AutoTokenizer
from peft import PeftModel

# Path to the full base checkpoint and to the LoRA adapter produced by training.
base_model_path = "/home/paperspace/axolotl/models/Meta-Llama-3-8B"
lora_model_path = "path/to/llama3-out"

# Load the tokenizer and the base model weights.
tokenizer = AutoTokenizer.from_pretrained(base_model_path)
base_model = LlamaForCausalLM.from_pretrained(base_model_path)

# Attach the LoRA adapter to the base model.
lora_model = PeftModel.from_pretrained(base_model, lora_model_path)

# Fold the adapter deltas into the base weights and drop the PEFT wrapper,
# yielding a plain standalone model.
merged_model = lora_model.merge_and_unload()

# Save the merged checkpoint in the safetensors format.
merged_model.save_pretrained("path/to/merged_model", safe_serialization=True)

# NOTE(review): the tokenizer is loaded but never saved alongside the merged
# model — consider tokenizer.save_pretrained("path/to/merged_model") so the
# output directory is self-contained.
Was this helpful?

