{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 30.0,
  "global_step": 17040,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.88,
      "learning_rate": 1.9413145539906104e-05,
      "loss": 0.1627,
      "step": 500
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.882629107981221e-05,
      "loss": 0.1324,
      "step": 1000
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.8239436619718312e-05,
      "loss": 0.1168,
      "step": 1500
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.7652582159624414e-05,
      "loss": 0.1056,
      "step": 2000
    },
    {
      "epoch": 4.4,
      "learning_rate": 1.706572769953052e-05,
      "loss": 0.0945,
      "step": 2500
    },
    {
      "epoch": 5.28,
      "learning_rate": 1.6478873239436623e-05,
      "loss": 0.0872,
      "step": 3000
    },
    {
      "epoch": 6.16,
      "learning_rate": 1.5892018779342725e-05,
      "loss": 0.0804,
      "step": 3500
    },
    {
      "epoch": 7.04,
      "learning_rate": 1.5305164319248827e-05,
      "loss": 0.0753,
      "step": 4000
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.4718309859154931e-05,
      "loss": 0.0694,
      "step": 4500
    },
    {
      "epoch": 8.8,
      "learning_rate": 1.4131455399061034e-05,
      "loss": 0.065,
      "step": 5000
    },
    {
      "epoch": 9.68,
      "learning_rate": 1.3544600938967136e-05,
      "loss": 0.0606,
      "step": 5500
    },
    {
      "epoch": 10.56,
      "learning_rate": 1.2957746478873242e-05,
      "loss": 0.0587,
      "step": 6000
    },
    {
      "epoch": 11.44,
      "learning_rate": 1.2370892018779344e-05,
      "loss": 0.0543,
      "step": 6500
    },
    {
      "epoch": 12.32,
      "learning_rate": 1.1784037558685446e-05,
      "loss": 0.052,
      "step": 7000
    },
    {
      "epoch": 13.2,
      "learning_rate": 1.119718309859155e-05,
      "loss": 0.0489,
      "step": 7500
    },
    {
      "epoch": 14.08,
      "learning_rate": 1.0610328638497653e-05,
      "loss": 0.0466,
      "step": 8000
    },
    {
      "epoch": 14.96,
      "learning_rate": 1.0023474178403755e-05,
      "loss": 0.0437,
      "step": 8500
    },
    {
      "epoch": 15.85,
      "learning_rate": 9.43661971830986e-06,
      "loss": 0.0416,
      "step": 9000
    },
    {
      "epoch": 16.73,
      "learning_rate": 8.849765258215963e-06,
      "loss": 0.0387,
      "step": 9500
    },
    {
      "epoch": 17.61,
      "learning_rate": 8.262910798122067e-06,
      "loss": 0.0371,
      "step": 10000
    },
    {
      "epoch": 18.49,
      "learning_rate": 7.67605633802817e-06,
      "loss": 0.0359,
      "step": 10500
    },
    {
      "epoch": 19.37,
      "learning_rate": 7.089201877934273e-06,
      "loss": 0.0334,
      "step": 11000
    },
    {
      "epoch": 20.25,
      "learning_rate": 6.502347417840375e-06,
      "loss": 0.0323,
      "step": 11500
    },
    {
      "epoch": 21.13,
      "learning_rate": 5.915492957746479e-06,
      "loss": 0.0309,
      "step": 12000
    },
    {
      "epoch": 22.01,
      "learning_rate": 5.328638497652583e-06,
      "loss": 0.0294,
      "step": 12500
    },
    {
      "epoch": 22.89,
      "learning_rate": 4.741784037558686e-06,
      "loss": 0.0278,
      "step": 13000
    },
    {
      "epoch": 23.77,
      "learning_rate": 4.154929577464789e-06,
      "loss": 0.0266,
      "step": 13500
    },
    {
      "epoch": 24.65,
      "learning_rate": 3.568075117370892e-06,
      "loss": 0.0256,
      "step": 14000
    },
    {
      "epoch": 25.53,
      "learning_rate": 2.9812206572769952e-06,
      "loss": 0.0248,
      "step": 14500
    },
    {
      "epoch": 26.41,
      "learning_rate": 2.3943661971830984e-06,
      "loss": 0.024,
      "step": 15000
    },
    {
      "epoch": 27.29,
      "learning_rate": 1.807511737089202e-06,
      "loss": 0.0236,
      "step": 15500
    },
    {
      "epoch": 28.17,
      "learning_rate": 1.2206572769953053e-06,
      "loss": 0.0227,
      "step": 16000
    },
    {
      "epoch": 29.05,
      "learning_rate": 6.338028169014085e-07,
      "loss": 0.022,
      "step": 16500
    },
    {
      "epoch": 29.93,
      "learning_rate": 4.694835680751174e-08,
      "loss": 0.0219,
      "step": 17000
    },
    {
      "epoch": 30.0,
      "step": 17040,
      "total_flos": 3.651708270625563e+16,
      "train_loss": 0.05439845494960955,
      "train_runtime": 9683.1827,
      "train_samples_per_second": 112.509,
      "train_steps_per_second": 1.76
    }
  ],
  "max_steps": 17040,
  "num_train_epochs": 30,
  "total_flos": 3.651708270625563e+16,
  "trial_name": null,
  "trial_params": null
}