{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 48090,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1,
      "learning_rate": 4.948014140153878e-05,
      "loss": 0.7397,
      "step": 500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.8960282803077565e-05,
      "loss": 0.2767,
      "step": 1000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.844042420461635e-05,
      "loss": 0.4767,
      "step": 1500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.792056560615513e-05,
      "loss": 0.4352,
      "step": 2000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.740070700769391e-05,
      "loss": 0.6092,
      "step": 2500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.688084840923269e-05,
      "loss": 0.1961,
      "step": 3000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.636098981077147e-05,
      "loss": 0.2384,
      "step": 3500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.5841131212310254e-05,
      "loss": 0.2925,
      "step": 4000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.532127261384904e-05,
      "loss": 0.1461,
      "step": 4500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.4801414015387816e-05,
      "loss": 0.1366,
      "step": 5000
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.4281555416926594e-05,
      "loss": 0.0943,
      "step": 5500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.376169681846538e-05,
      "loss": 0.098,
      "step": 6000
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.324183822000416e-05,
      "loss": 0.2768,
      "step": 6500
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.272197962154294e-05,
      "loss": 0.1589,
      "step": 7000
    },
    {
      "epoch": 1.56,
      "learning_rate": 4.220212102308173e-05,
      "loss": 0.2075,
      "step": 7500
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.1682262424620505e-05,
      "loss": 0.1032,
      "step": 8000
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.116240382615928e-05,
      "loss": 0.092,
      "step": 8500
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.064254522769807e-05,
      "loss": 0.0877,
      "step": 9000
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.012268662923685e-05,
      "loss": 0.1081,
      "step": 9500
    },
    {
      "epoch": 2.08,
      "learning_rate": 3.960282803077563e-05,
      "loss": 0.0561,
      "step": 10000
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.9082969432314415e-05,
      "loss": 0.0463,
      "step": 10500
    },
    {
      "epoch": 2.29,
      "learning_rate": 3.856311083385319e-05,
      "loss": 0.0453,
      "step": 11000
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.804325223539197e-05,
      "loss": 0.0466,
      "step": 11500
    },
    {
      "epoch": 2.5,
      "learning_rate": 3.7523393636930756e-05,
      "loss": 0.0484,
      "step": 12000
    },
    {
      "epoch": 2.6,
      "learning_rate": 3.700353503846954e-05,
      "loss": 0.0442,
      "step": 12500
    },
    {
      "epoch": 2.7,
      "learning_rate": 3.648367644000832e-05,
      "loss": 0.1853,
      "step": 13000
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.59638178415471e-05,
      "loss": 0.0908,
      "step": 13500
    },
    {
      "epoch": 2.91,
      "learning_rate": 3.544395924308588e-05,
      "loss": 0.0353,
      "step": 14000
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.4924100644624667e-05,
      "loss": 0.0333,
      "step": 14500
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.4404242046163445e-05,
      "loss": 0.0379,
      "step": 15000
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.388438344770223e-05,
      "loss": 0.0439,
      "step": 15500
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.336452484924101e-05,
      "loss": 0.0419,
      "step": 16000
    },
    {
      "epoch": 3.43,
      "learning_rate": 3.2844666250779785e-05,
      "loss": 0.0355,
      "step": 16500
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.232480765231857e-05,
      "loss": 0.0368,
      "step": 17000
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.1804949053857355e-05,
      "loss": 0.0336,
      "step": 17500
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.128509045539613e-05,
      "loss": 0.176,
      "step": 18000
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.076523185693492e-05,
      "loss": 0.2009,
      "step": 18500
    },
    {
      "epoch": 3.95,
      "learning_rate": 3.0245373258473696e-05,
      "loss": 0.1369,
      "step": 19000
    },
    {
      "epoch": 4.05,
      "learning_rate": 2.9725514660012477e-05,
      "loss": 0.1509,
      "step": 19500
    },
    {
      "epoch": 4.16,
      "learning_rate": 2.9205656061551262e-05,
      "loss": 0.1126,
      "step": 20000
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.868579746309004e-05,
      "loss": 0.0878,
      "step": 20500
    },
    {
      "epoch": 4.37,
      "learning_rate": 2.816593886462882e-05,
      "loss": 0.0961,
      "step": 21000
    },
    {
      "epoch": 4.47,
      "learning_rate": 2.76460802661676e-05,
      "loss": 0.1387,
      "step": 21500
    },
    {
      "epoch": 4.57,
      "learning_rate": 2.7126221667706388e-05,
      "loss": 0.0979,
      "step": 22000
    },
    {
      "epoch": 4.68,
      "learning_rate": 2.6606363069245166e-05,
      "loss": 0.0647,
      "step": 22500
    },
    {
      "epoch": 4.78,
      "learning_rate": 2.6086504470783947e-05,
      "loss": 0.05,
      "step": 23000
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.5566645872322732e-05,
      "loss": 0.0221,
      "step": 23500
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.504678727386151e-05,
      "loss": 0.0252,
      "step": 24000
    },
    {
      "epoch": 5.09,
      "learning_rate": 2.452692867540029e-05,
      "loss": 0.0245,
      "step": 24500
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.4007070076939073e-05,
      "loss": 0.0195,
      "step": 25000
    },
    {
      "epoch": 5.3,
      "learning_rate": 2.3487211478477854e-05,
      "loss": 0.023,
      "step": 25500
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.296735288001664e-05,
      "loss": 0.0204,
      "step": 26000
    },
    {
      "epoch": 5.51,
      "learning_rate": 2.2447494281555417e-05,
      "loss": 0.0789,
      "step": 26500
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.19276356830942e-05,
      "loss": 0.056,
      "step": 27000
    },
    {
      "epoch": 5.72,
      "learning_rate": 2.140777708463298e-05,
      "loss": 0.0301,
      "step": 27500
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.088791848617176e-05,
      "loss": 0.0439,
      "step": 28000
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.0368059887710546e-05,
      "loss": 0.0254,
      "step": 28500
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.9848201289249324e-05,
      "loss": 0.0132,
      "step": 29000
    },
    {
      "epoch": 6.13,
      "learning_rate": 1.9328342690788105e-05,
      "loss": 0.019,
      "step": 29500
    },
    {
      "epoch": 6.24,
      "learning_rate": 1.880848409232689e-05,
      "loss": 0.0187,
      "step": 30000
    },
    {
      "epoch": 6.34,
      "learning_rate": 1.8288625493865668e-05,
      "loss": 0.0196,
      "step": 30500
    },
    {
      "epoch": 6.45,
      "learning_rate": 1.776876689540445e-05,
      "loss": 0.062,
      "step": 31000
    },
    {
      "epoch": 6.55,
      "learning_rate": 1.7248908296943234e-05,
      "loss": 0.0454,
      "step": 31500
    },
    {
      "epoch": 6.65,
      "learning_rate": 1.6729049698482012e-05,
      "loss": 0.0153,
      "step": 32000
    },
    {
      "epoch": 6.76,
      "learning_rate": 1.6209191100020797e-05,
      "loss": 0.0133,
      "step": 32500
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.5689332501559575e-05,
      "loss": 0.0158,
      "step": 33000
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.5169473903098358e-05,
      "loss": 0.0143,
      "step": 33500
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.464961530463714e-05,
      "loss": 0.0196,
      "step": 34000
    },
    {
      "epoch": 7.17,
      "learning_rate": 1.412975670617592e-05,
      "loss": 0.01,
      "step": 34500
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.3609898107714703e-05,
      "loss": 0.0089,
      "step": 35000
    },
    {
      "epoch": 7.38,
      "learning_rate": 1.3090039509253486e-05,
      "loss": 0.0106,
      "step": 35500
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.2570180910792265e-05,
      "loss": 0.0093,
      "step": 36000
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.2050322312331047e-05,
      "loss": 0.0082,
      "step": 36500
    },
    {
      "epoch": 7.69,
      "learning_rate": 1.1530463713869828e-05,
      "loss": 0.0112,
      "step": 37000
    },
    {
      "epoch": 7.8,
      "learning_rate": 1.101060511540861e-05,
      "loss": 0.0132,
      "step": 37500
    },
    {
      "epoch": 7.9,
      "learning_rate": 1.0490746516947391e-05,
      "loss": 0.0092,
      "step": 38000
    },
    {
      "epoch": 8.01,
      "learning_rate": 9.970887918486172e-06,
      "loss": 0.0109,
      "step": 38500
    },
    {
      "epoch": 8.11,
      "learning_rate": 9.451029320024954e-06,
      "loss": 0.0083,
      "step": 39000
    },
    {
      "epoch": 8.21,
      "learning_rate": 8.931170721563735e-06,
      "loss": 0.0049,
      "step": 39500
    },
    {
      "epoch": 8.32,
      "learning_rate": 8.411312123102517e-06,
      "loss": 0.0076,
      "step": 40000
    },
    {
      "epoch": 8.42,
      "learning_rate": 7.891453524641298e-06,
      "loss": 0.0054,
      "step": 40500
    },
    {
      "epoch": 8.53,
      "learning_rate": 7.3715949261800795e-06,
      "loss": 0.0033,
      "step": 41000
    },
    {
      "epoch": 8.63,
      "learning_rate": 6.851736327718861e-06,
      "loss": 0.0051,
      "step": 41500
    },
    {
      "epoch": 8.73,
      "learning_rate": 6.3318777292576415e-06,
      "loss": 0.0057,
      "step": 42000
    },
    {
      "epoch": 8.84,
      "learning_rate": 5.812019130796424e-06,
      "loss": 0.008,
      "step": 42500
    },
    {
      "epoch": 8.94,
      "learning_rate": 5.292160532335205e-06,
      "loss": 0.005,
      "step": 43000
    },
    {
      "epoch": 9.05,
      "learning_rate": 4.7723019338739865e-06,
      "loss": 0.0058,
      "step": 43500
    },
    {
      "epoch": 9.15,
      "learning_rate": 4.252443335412768e-06,
      "loss": 0.0042,
      "step": 44000
    },
    {
      "epoch": 9.25,
      "learning_rate": 3.7325847369515494e-06,
      "loss": 0.0034,
      "step": 44500
    },
    {
      "epoch": 9.36,
      "learning_rate": 3.2127261384903308e-06,
      "loss": 0.0029,
      "step": 45000
    },
    {
      "epoch": 9.46,
      "learning_rate": 2.692867540029112e-06,
      "loss": 0.0036,
      "step": 45500
    },
    {
      "epoch": 9.57,
      "learning_rate": 2.1730089415678936e-06,
      "loss": 0.0034,
      "step": 46000
    },
    {
      "epoch": 9.67,
      "learning_rate": 1.6531503431066752e-06,
      "loss": 0.0021,
      "step": 46500
    },
    {
      "epoch": 9.77,
      "learning_rate": 1.1332917446454564e-06,
      "loss": 0.0039,
      "step": 47000
    },
    {
      "epoch": 9.88,
      "learning_rate": 6.134331461842379e-07,
      "loss": 0.0033,
      "step": 47500
    },
    {
      "epoch": 9.98,
      "learning_rate": 9.357454772301934e-08,
      "loss": 0.0068,
      "step": 48000
    },
    {
      "epoch": 10.0,
      "step": 48090,
      "total_flos": 1.579934745091154e+17,
      "train_runtime": 28299.6455,
      "train_samples_per_second": 1.699
    }
  ],
  "max_steps": 48090,
  "num_train_epochs": 10,
  "total_flos": 1.579934745091154e+17,
  "trial_name": null,
  "trial_params": null
}