Update train.py
Browse files
train.py
CHANGED
@@ -351,7 +351,7 @@ def finetune(
|
|
351 |
# Set up the optimizer
|
352 |
optimizer = torch.optim.Adam(wrapper.parameters(), **optimizer_config)
|
353 |
# Set up the scheduler for learning rate decay
|
354 |
-
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=
|
355 |
|
356 |
# Set up the loss criterion
|
357 |
if criterion is None:
|
|
|
351 |
# Set up the optimizer
|
352 |
optimizer = torch.optim.Adam(wrapper.parameters(), **optimizer_config)
|
353 |
# Set up the scheduler for learning rate decay
|
354 |
+
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.8)  # Multiply LR by 0.8 every 20 epochs
|
355 |
|
356 |
# Set up the loss criterion
|
357 |
if criterion is None:
|