import torch

# Specify the correct path to your LoRA checkpoint file
lora_path = "./lora_trained_model.pt"  # Replace with the correct file name

# Define the device (use CPU in this case)
device = torch.device("cpu")

# Load the checkpoint
checkpoint = torch.load(lora_path, map_location=device)

# Print the keys in the checkpoint to check the structure
print("Checkpoint keys:", checkpoint.keys())
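If the loaded object is a plain state dict of LoRA tensors (an assumption; some checkpoints instead wrap the weights under a key such as "state_dict"), a quick follow-up is to print each parameter's shape to confirm the structure:

# Hedged sketch: assumes `checkpoint` is either a flat dict of name -> tensor,
# or a wrapper dict holding such a mapping under a "state_dict" key.
state_dict = checkpoint.get("state_dict", checkpoint) if isinstance(checkpoint, dict) else checkpoint

# Print the name and shape of each tensor-like entry
for name, value in state_dict.items():
    if hasattr(value, "shape"):
        print(f"{name}: {tuple(value.shape)}")
    else:
        print(f"{name}: {type(value).__name__}")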