wi-lab committed on
Commit
fe740e9
·
verified ·
1 Parent(s): 4d2bf4f

Update train.py

Browse files
Files changed (1) hide show
  1. train.py +1 -1
train.py CHANGED
@@ -351,7 +351,7 @@ def finetune(
351
  # Set up the optimizer
352
  optimizer = torch.optim.Adam(wrapper.parameters(), **optimizer_config)
353
  # Set up the scheduler for learning rate decay
354
- scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.2) # Example: Reduce LR by 10x every 10 epochs
355
 
356
  # Set up the loss criterion
357
  if criterion is None:
 
351
  # Set up the optimizer
352
  optimizer = torch.optim.Adam(wrapper.parameters(), **optimizer_config)
353
  # Set up the scheduler for learning rate decay
354
+ scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.8) # Multiply LR by 0.8 (20% decay) every 20 epochs
355
 
356
  # Set up the loss criterion
357
  if criterion is None: