import torch
import os

lora_path = "lora_trained_model.pt"

if os.path.exists(lora_path):
    try:
        state_dict = torch.load(lora_path, map_location="cpu")
        print("✅ LoRA checkpoint loaded successfully!")
        print("Keys in the checkpoint:", state_dict.keys())  # Print available keys
    except Exception as e:
        print(f"❌ Error loading LoRA checkpoint: {e}")
else:
    print("❌ LoRA file not found.")
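# --- Optional follow-up: a minimal inspection sketch, not part of the original script. ---
# It assumes the checkpoint is a flat dict mapping parameter names to tensors
# (a nested dict or non-tensor entries would need extra handling). Printing each
# tensor's shape and dtype helps spot the LoRA rank and the targeted layers.
if os.path.exists(lora_path):
    checkpoint = torch.load(lora_path, map_location="cpu")
    for name, value in checkpoint.items():
        if torch.is_tensor(value):
            print(f"{name}: shape={tuple(value.shape)}, dtype={value.dtype}")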