import os

import torch


def check_lora_checkpoint(lora_path: str = "lora_trained_model.pt") -> bool:
    """Check that a LoRA checkpoint exists and can be deserialized.

    Prints a status message for each outcome (missing file, load failure,
    success) and, on success, the top-level keys of the loaded object.

    Args:
        lora_path: Path to the checkpoint file saved with ``torch.save``.

    Returns:
        True if the file exists and loads without error, False otherwise.
    """
    if not os.path.exists(lora_path):
        print("❌ LoRA file not found.")
        return False
    try:
        # map_location="cpu" so the check works on machines without a GPU.
        # NOTE(review): torch.load unpickles arbitrary objects; consider
        # weights_only=True if the checkpoint may come from an untrusted
        # source — confirm the checkpoint contains only tensors first.
        state_dict = torch.load(lora_path, map_location="cpu")
    except Exception as e:
        # Broad catch is deliberate: report any load failure instead of
        # crashing the diagnostic script.
        print(f"❌ Error loading LoRA checkpoint: {e}")
        return False
    print("✅ LoRA checkpoint loaded successfully!")
    print("Keys in the checkpoint:", state_dict.keys())  # Print available keys
    return True


if __name__ == "__main__":
    check_lora_checkpoint()