# Inspect the structure of a saved LoRA checkpoint file.
import torch

# Path to the LoRA checkpoint file (adjust to your actual file name).
LORA_PATH = "./lora_trained_model.pt"


def load_checkpoint(path, device=None):
    """Load a checkpoint from *path* onto *device* and return it.

    Args:
        path: Filesystem path to a ``torch.save``-produced checkpoint.
        device: Target ``torch.device``; defaults to CPU so the load works
            on machines without a GPU.

    Returns:
        The deserialized checkpoint object (typically a state-dict mapping
        parameter names to tensors).

    Raises:
        FileNotFoundError: If *path* does not exist.
    """
    if device is None:
        device = torch.device("cpu")
    # weights_only=True restricts unpickling to tensors/containers, closing
    # the arbitrary-code-execution hole of loading untrusted pickle files.
    # (This is the default from PyTorch 2.6 onward.)
    return torch.load(path, map_location=device, weights_only=True)


if __name__ == "__main__":
    checkpoint = load_checkpoint(LORA_PATH)
    # A checkpoint may be a dict (state-dict) or some other saved object;
    # only dicts have .keys(), so guard before printing the structure.
    if isinstance(checkpoint, dict):
        print("Checkpoint keys:", checkpoint.keys())
    else:
        print("Checkpoint type:", type(checkpoint).__name__)