{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 26.0,
  "global_step": 96694,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-09,
      "loss": 10.4846,
      "step": 1
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.5e-06,
      "loss": 9.5826,
      "step": 500
    },
    {
      "epoch": 0.27,
      "learning_rate": 5e-06,
      "loss": 8.0324,
      "step": 1000
    },
    {
      "epoch": 0.4,
      "learning_rate": 7.5e-06,
      "loss": 7.0008,
      "step": 1500
    },
    {
      "epoch": 0.54,
      "learning_rate": 1e-05,
      "loss": 6.7031,
      "step": 2000
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.25e-05,
      "loss": 6.5343,
      "step": 2500
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.5e-05,
      "loss": 6.4222,
      "step": 3000
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.75e-05,
      "loss": 6.3273,
      "step": 3500
    },
    {
      "epoch": 1.08,
      "learning_rate": 2e-05,
      "loss": 6.2538,
      "step": 4000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.25e-05,
      "loss": 6.1897,
      "step": 4500
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.5e-05,
      "loss": 6.1311,
      "step": 5000
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 6.079,
      "step": 5500
    },
    {
      "epoch": 1.61,
      "learning_rate": 3e-05,
      "loss": 6.0395,
      "step": 6000
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 5.997,
      "step": 6500
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.5e-05,
      "loss": 5.965,
      "step": 7000
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 5.9361,
      "step": 7500
    },
    {
      "epoch": 2.15,
      "learning_rate": 4e-05,
      "loss": 5.9099,
      "step": 8000
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.2495e-05,
      "loss": 5.8867,
      "step": 8500
    },
    {
      "epoch": 2.42,
      "learning_rate": 4.4995000000000005e-05,
      "loss": 5.8638,
      "step": 9000
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.7495e-05,
      "loss": 5.845,
      "step": 9500
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.9995000000000005e-05,
      "loss": 5.828,
      "step": 10000
    },
    {
      "epoch": 2.82,
      "learning_rate": 4.998314834867353e-05,
      "loss": 5.8092,
      "step": 10500
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.996622902003249e-05,
      "loss": 5.7931,
      "step": 11000
    },
    {
      "epoch": 3.09,
      "learning_rate": 4.994930969139145e-05,
      "loss": 5.7772,
      "step": 11500
    },
    {
      "epoch": 3.23,
      "learning_rate": 4.993239036275041e-05,
      "loss": 5.7664,
      "step": 12000
    },
    {
      "epoch": 3.36,
      "learning_rate": 4.991547103410937e-05,
      "loss": 5.7551,
      "step": 12500
    },
    {
      "epoch": 3.5,
      "learning_rate": 4.9898585544125606e-05,
      "loss": 5.7489,
      "step": 13000
    },
    {
      "epoch": 3.63,
      "learning_rate": 4.988166621548457e-05,
      "loss": 5.7372,
      "step": 13500
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.986474688684353e-05,
      "loss": 5.7249,
      "step": 14000
    },
    {
      "epoch": 3.9,
      "learning_rate": 4.984782755820249e-05,
      "loss": 5.7199,
      "step": 14500
    },
    {
      "epoch": 4.03,
      "learning_rate": 4.983097590687602e-05,
      "loss": 5.7125,
      "step": 15000
    },
    {
      "epoch": 4.17,
      "learning_rate": 4.981405657823498e-05,
      "loss": 5.7012,
      "step": 15500
    },
    {
      "epoch": 4.3,
      "learning_rate": 4.979713724959394e-05,
      "loss": 5.6956,
      "step": 16000
    },
    {
      "epoch": 4.44,
      "learning_rate": 4.97802179209529e-05,
      "loss": 5.6891,
      "step": 16500
    },
    {
      "epoch": 4.57,
      "learning_rate": 4.976329859231186e-05,
      "loss": 5.684,
      "step": 17000
    },
    {
      "epoch": 4.71,
      "learning_rate": 4.9746379263670824e-05,
      "loss": 5.678,
      "step": 17500
    },
    {
      "epoch": 4.84,
      "learning_rate": 4.9729459935029784e-05,
      "loss": 5.6705,
      "step": 18000
    },
    {
      "epoch": 4.97,
      "learning_rate": 4.9712540606388744e-05,
      "loss": 5.6676,
      "step": 18500
    },
    {
      "epoch": 5.11,
      "learning_rate": 4.969565511640498e-05,
      "loss": 5.6613,
      "step": 19000
    },
    {
      "epoch": 5.24,
      "learning_rate": 4.967873578776394e-05,
      "loss": 5.6558,
      "step": 19500
    },
    {
      "epoch": 5.38,
      "learning_rate": 4.966181645912291e-05,
      "loss": 5.6515,
      "step": 20000
    },
    {
      "epoch": 5.51,
      "learning_rate": 4.964489713048186e-05,
      "loss": 5.6456,
      "step": 20500
    },
    {
      "epoch": 5.65,
      "learning_rate": 4.962797780184082e-05,
      "loss": 5.6469,
      "step": 21000
    },
    {
      "epoch": 5.78,
      "learning_rate": 4.961105847319978e-05,
      "loss": 5.6387,
      "step": 21500
    },
    {
      "epoch": 5.92,
      "learning_rate": 4.959413914455875e-05,
      "loss": 5.638,
      "step": 22000
    },
    {
      "epoch": 6.05,
      "learning_rate": 4.957725365457499e-05,
      "loss": 5.6341,
      "step": 22500
    },
    {
      "epoch": 6.18,
      "learning_rate": 4.956033432593395e-05,
      "loss": 5.6258,
      "step": 23000
    },
    {
      "epoch": 6.32,
      "learning_rate": 4.954341499729291e-05,
      "loss": 5.6212,
      "step": 23500
    },
    {
      "epoch": 6.45,
      "learning_rate": 4.952649566865187e-05,
      "loss": 5.6209,
      "step": 24000
    },
    {
      "epoch": 6.59,
      "learning_rate": 4.9509576340010835e-05,
      "loss": 5.6152,
      "step": 24500
    },
    {
      "epoch": 6.72,
      "learning_rate": 4.9492657011369795e-05,
      "loss": 5.6119,
      "step": 25000
    },
    {
      "epoch": 6.86,
      "learning_rate": 4.947573768272875e-05,
      "loss": 5.6102,
      "step": 25500
    },
    {
      "epoch": 6.99,
      "learning_rate": 4.945881835408771e-05,
      "loss": 5.6093,
      "step": 26000
    },
    {
      "epoch": 7.13,
      "learning_rate": 4.9441932864103954e-05,
      "loss": 5.6016,
      "step": 26500
    },
    {
      "epoch": 7.26,
      "learning_rate": 4.9425013535462914e-05,
      "loss": 5.5963,
      "step": 27000
    },
    {
      "epoch": 7.39,
      "learning_rate": 4.9408094206821874e-05,
      "loss": 5.5972,
      "step": 27500
    },
    {
      "epoch": 7.53,
      "learning_rate": 4.9391174878180833e-05,
      "loss": 5.5972,
      "step": 28000
    },
    {
      "epoch": 7.66,
      "learning_rate": 4.937428938819708e-05,
      "loss": 5.5913,
      "step": 28500
    },
    {
      "epoch": 7.8,
      "learning_rate": 4.935737005955604e-05,
      "loss": 5.5872,
      "step": 29000
    },
    {
      "epoch": 7.93,
      "learning_rate": 4.9340450730915e-05,
      "loss": 5.5835,
      "step": 29500
    },
    {
      "epoch": 8.07,
      "learning_rate": 4.932356524093124e-05,
      "loss": 5.5839,
      "step": 30000
    },
    {
      "epoch": 8.2,
      "learning_rate": 4.93066459122902e-05,
      "loss": 5.5805,
      "step": 30500
    },
    {
      "epoch": 8.34,
      "learning_rate": 4.9289726583649165e-05,
      "loss": 5.5759,
      "step": 31000
    },
    {
      "epoch": 8.47,
      "learning_rate": 4.9272807255008125e-05,
      "loss": 5.5736,
      "step": 31500
    },
    {
      "epoch": 8.6,
      "learning_rate": 4.9255887926367085e-05,
      "loss": 5.4648,
      "step": 32000
    },
    {
      "epoch": 8.74,
      "learning_rate": 4.9238968597726045e-05,
      "loss": 5.2006,
      "step": 32500
    },
    {
      "epoch": 8.87,
      "learning_rate": 4.9222049269085005e-05,
      "loss": 4.9977,
      "step": 33000
    },
    {
      "epoch": 9.01,
      "learning_rate": 4.920516377910125e-05,
      "loss": 4.8081,
      "step": 33500
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.918824445046021e-05,
      "loss": 4.6287,
      "step": 34000
    },
    {
      "epoch": 9.28,
      "learning_rate": 4.917132512181917e-05,
      "loss": 4.46,
      "step": 34500
    },
    {
      "epoch": 9.41,
      "learning_rate": 4.9154405793178123e-05,
      "loss": 4.2999,
      "step": 35000
    },
    {
      "epoch": 9.55,
      "learning_rate": 4.913748646453709e-05,
      "loss": 4.1407,
      "step": 35500
    },
    {
      "epoch": 9.68,
      "learning_rate": 4.912056713589605e-05,
      "loss": 3.9717,
      "step": 36000
    },
    {
      "epoch": 9.81,
      "learning_rate": 4.910364780725501e-05,
      "loss": 3.7197,
      "step": 36500
    },
    {
      "epoch": 9.95,
      "learning_rate": 4.908672847861397e-05,
      "loss": 3.3116,
      "step": 37000
    },
    {
      "epoch": 10.08,
      "learning_rate": 4.9069842988630216e-05,
      "loss": 2.6408,
      "step": 37500
    },
    {
      "epoch": 10.22,
      "learning_rate": 4.9052923659989176e-05,
      "loss": 2.316,
      "step": 38000
    },
    {
      "epoch": 10.35,
      "learning_rate": 4.9036004331348136e-05,
      "loss": 2.1523,
      "step": 38500
    },
    {
      "epoch": 10.49,
      "learning_rate": 4.9019085002707096e-05,
      "loss": 2.0432,
      "step": 39000
    },
    {
      "epoch": 10.62,
      "learning_rate": 4.900219951272334e-05,
      "loss": 1.9639,
      "step": 39500
    },
    {
      "epoch": 10.76,
      "learning_rate": 4.89852801840823e-05,
      "loss": 1.8997,
      "step": 40000
    },
    {
      "epoch": 10.89,
      "learning_rate": 4.896836085544126e-05,
      "loss": 1.8451,
      "step": 40500
    },
    {
      "epoch": 11.02,
      "learning_rate": 4.895144152680022e-05,
      "loss": 1.8004,
      "step": 41000
    },
    {
      "epoch": 11.16,
      "learning_rate": 4.8934522198159174e-05,
      "loss": 1.7583,
      "step": 41500
    },
    {
      "epoch": 11.29,
      "learning_rate": 4.891763670817543e-05,
      "loss": 1.7173,
      "step": 42000
    },
    {
      "epoch": 11.43,
      "learning_rate": 4.890071737953438e-05,
      "loss": 1.6864,
      "step": 42500
    },
    {
      "epoch": 11.56,
      "learning_rate": 4.888379805089334e-05,
      "loss": 1.6528,
      "step": 43000
    },
    {
      "epoch": 11.7,
      "learning_rate": 4.88668787222523e-05,
      "loss": 1.6227,
      "step": 43500
    },
    {
      "epoch": 11.83,
      "learning_rate": 4.884995939361127e-05,
      "loss": 1.5983,
      "step": 44000
    },
    {
      "epoch": 11.97,
      "learning_rate": 4.8833073903627506e-05,
      "loss": 1.572,
      "step": 44500
    },
    {
      "epoch": 12.1,
      "learning_rate": 4.8816154574986466e-05,
      "loss": 1.5456,
      "step": 45000
    },
    {
      "epoch": 12.23,
      "learning_rate": 4.8799235246345426e-05,
      "loss": 1.5251,
      "step": 45500
    },
    {
      "epoch": 12.37,
      "learning_rate": 4.8782315917704385e-05,
      "loss": 1.5069,
      "step": 46000
    },
    {
      "epoch": 12.5,
      "learning_rate": 4.876543042772063e-05,
      "loss": 1.4878,
      "step": 46500
    },
    {
      "epoch": 12.64,
      "learning_rate": 4.874851109907959e-05,
      "loss": 1.4701,
      "step": 47000
    },
    {
      "epoch": 12.77,
      "learning_rate": 4.873159177043855e-05,
      "loss": 1.4539,
      "step": 47500
    },
    {
      "epoch": 12.91,
      "learning_rate": 4.871467244179751e-05,
      "loss": 1.4351,
      "step": 48000
    },
    {
      "epoch": 13.04,
      "learning_rate": 4.8697820790471036e-05,
      "loss": 1.4222,
      "step": 48500
    },
    {
      "epoch": 13.18,
      "learning_rate": 4.8680901461829996e-05,
      "loss": 1.4066,
      "step": 49000
    },
    {
      "epoch": 13.31,
      "learning_rate": 4.8663982133188956e-05,
      "loss": 1.3945,
      "step": 49500
    },
    {
      "epoch": 13.44,
      "learning_rate": 4.8647062804547916e-05,
      "loss": 1.3826,
      "step": 50000
    },
    {
      "epoch": 13.58,
      "learning_rate": 4.863014347590688e-05,
      "loss": 1.3704,
      "step": 50500
    },
    {
      "epoch": 13.71,
      "learning_rate": 4.861322414726584e-05,
      "loss": 1.3579,
      "step": 51000
    },
    {
      "epoch": 13.85,
      "learning_rate": 4.85963048186248e-05,
      "loss": 1.3498,
      "step": 51500
    },
    {
      "epoch": 13.98,
      "learning_rate": 4.8579385489983756e-05,
      "loss": 1.3353,
      "step": 52000
    },
    {
      "epoch": 14.12,
      "learning_rate": 4.8562466161342715e-05,
      "loss": 1.325,
      "step": 52500
    },
    {
      "epoch": 14.25,
      "learning_rate": 4.854558067135896e-05,
      "loss": 1.3157,
      "step": 53000
    },
    {
      "epoch": 14.39,
      "learning_rate": 4.852869518137521e-05,
      "loss": 1.3062,
      "step": 53500
    },
    {
      "epoch": 14.52,
      "learning_rate": 4.851177585273417e-05,
      "loss": 1.2965,
      "step": 54000
    },
    {
      "epoch": 14.65,
      "learning_rate": 4.849485652409313e-05,
      "loss": 1.2878,
      "step": 54500
    },
    {
      "epoch": 14.79,
      "learning_rate": 4.847793719545209e-05,
      "loss": 1.2812,
      "step": 55000
    },
    {
      "epoch": 14.92,
      "learning_rate": 4.846101786681105e-05,
      "loss": 1.2743,
      "step": 55500
    },
    {
      "epoch": 15.06,
      "learning_rate": 4.8444132376827286e-05,
      "loss": 1.2637,
      "step": 56000
    },
    {
      "epoch": 15.19,
      "learning_rate": 4.8427213048186246e-05,
      "loss": 1.2563,
      "step": 56500
    },
    {
      "epoch": 15.33,
      "learning_rate": 4.841029371954521e-05,
      "loss": 1.2498,
      "step": 57000
    },
    {
      "epoch": 15.46,
      "learning_rate": 4.839337439090417e-05,
      "loss": 1.2417,
      "step": 57500
    },
    {
      "epoch": 15.6,
      "learning_rate": 4.837645506226313e-05,
      "loss": 1.2354,
      "step": 58000
    },
    {
      "epoch": 15.73,
      "learning_rate": 4.835953573362209e-05,
      "loss": 1.2274,
      "step": 58500
    },
    {
      "epoch": 15.86,
      "learning_rate": 4.834265024363833e-05,
      "loss": 1.2227,
      "step": 59000
    },
    {
      "epoch": 16.0,
      "learning_rate": 4.83257309149973e-05,
      "loss": 1.2157,
      "step": 59500
    },
    {
      "epoch": 16.13,
      "learning_rate": 4.830881158635626e-05,
      "loss": 1.2092,
      "step": 60000
    },
    {
      "epoch": 16.27,
      "learning_rate": 4.829189225771522e-05,
      "loss": 1.2043,
      "step": 60500
    },
    {
      "epoch": 16.4,
      "learning_rate": 4.827497292907418e-05,
      "loss": 1.1968,
      "step": 61000
    },
    {
      "epoch": 16.54,
      "learning_rate": 4.8258087439090424e-05,
      "loss": 1.1929,
      "step": 61500
    },
    {
      "epoch": 16.67,
      "learning_rate": 4.8241168110449384e-05,
      "loss": 1.1853,
      "step": 62000
    },
    {
      "epoch": 16.81,
      "learning_rate": 4.822424878180834e-05,
      "loss": 1.1799,
      "step": 62500
    },
    {
      "epoch": 16.94,
      "learning_rate": 4.82073294531673e-05,
      "loss": 1.1758,
      "step": 63000
    },
    {
      "epoch": 17.07,
      "learning_rate": 4.819041012452626e-05,
      "loss": 1.1701,
      "step": 63500
    },
    {
      "epoch": 17.21,
      "learning_rate": 4.817349079588522e-05,
      "loss": 1.164,
      "step": 64000
    },
    {
      "epoch": 17.34,
      "learning_rate": 4.815660530590146e-05,
      "loss": 1.1566,
      "step": 64500
    },
    {
      "epoch": 17.48,
      "learning_rate": 4.813968597726042e-05,
      "loss": 1.1539,
      "step": 65000
    },
    {
      "epoch": 17.61,
      "learning_rate": 4.812276664861938e-05,
      "loss": 1.1496,
      "step": 65500
    },
    {
      "epoch": 17.75,
      "learning_rate": 4.810584731997835e-05,
      "loss": 1.1435,
      "step": 66000
    },
    {
      "epoch": 17.88,
      "learning_rate": 4.808892799133731e-05,
      "loss": 1.1417,
      "step": 66500
    },
    {
      "epoch": 18.02,
      "learning_rate": 4.807204250135355e-05,
      "loss": 1.1361,
      "step": 67000
    },
    {
      "epoch": 18.15,
      "learning_rate": 4.805515701136979e-05,
      "loss": 1.1318,
      "step": 67500
    },
    {
      "epoch": 18.28,
      "learning_rate": 4.8038237682728754e-05,
      "loss": 1.1253,
      "step": 68000
    },
    {
      "epoch": 18.42,
      "learning_rate": 4.8021318354087714e-05,
      "loss": 1.1202,
      "step": 68500
    },
    {
      "epoch": 18.55,
      "learning_rate": 4.8004399025446674e-05,
      "loss": 1.1181,
      "step": 69000
    },
    {
      "epoch": 18.69,
      "learning_rate": 4.7987479696805633e-05,
      "loss": 1.1153,
      "step": 69500
    },
    {
      "epoch": 18.82,
      "learning_rate": 4.7970560368164593e-05,
      "loss": 1.1084,
      "step": 70000
    },
    {
      "epoch": 18.96,
      "learning_rate": 4.795367487818084e-05,
      "loss": 1.105,
      "step": 70500
    },
    {
      "epoch": 19.09,
      "learning_rate": 4.79367555495398e-05,
      "loss": 1.1036,
      "step": 71000
    },
    {
      "epoch": 19.23,
      "learning_rate": 4.791983622089876e-05,
      "loss": 1.0968,
      "step": 71500
    },
    {
      "epoch": 19.36,
      "learning_rate": 4.790291689225771e-05,
      "loss": 1.0944,
      "step": 72000
    },
    {
      "epoch": 19.49,
      "learning_rate": 4.7886031402273965e-05,
      "loss": 1.0924,
      "step": 72500
    },
    {
      "epoch": 19.63,
      "learning_rate": 4.786911207363292e-05,
      "loss": 1.0872,
      "step": 73000
    },
    {
      "epoch": 19.76,
      "learning_rate": 4.785219274499188e-05,
      "loss": 1.084,
      "step": 73500
    },
    {
      "epoch": 19.9,
      "learning_rate": 4.783527341635084e-05,
      "loss": 1.0801,
      "step": 74000
    },
    {
      "epoch": 20.03,
      "learning_rate": 4.7818354087709805e-05,
      "loss": 1.0768,
      "step": 74500
    },
    {
      "epoch": 20.17,
      "learning_rate": 4.7801468597726044e-05,
      "loss": 1.0728,
      "step": 75000
    },
    {
      "epoch": 20.3,
      "learning_rate": 4.7784549269085004e-05,
      "loss": 1.0696,
      "step": 75500
    },
    {
      "epoch": 20.44,
      "learning_rate": 4.7767629940443963e-05,
      "loss": 1.0646,
      "step": 76000
    },
    {
      "epoch": 20.57,
      "learning_rate": 4.7750710611802923e-05,
      "loss": 1.0634,
      "step": 76500
    },
    {
      "epoch": 20.7,
      "learning_rate": 4.773382512181917e-05,
      "loss": 1.0596,
      "step": 77000
    },
    {
      "epoch": 20.84,
      "learning_rate": 4.771690579317813e-05,
      "loss": 1.0572,
      "step": 77500
    },
    {
      "epoch": 20.97,
      "learning_rate": 4.769998646453709e-05,
      "loss": 1.0549,
      "step": 78000
    },
    {
      "epoch": 21.11,
      "learning_rate": 4.768306713589605e-05,
      "loss": 1.05,
      "step": 78500
    },
    {
      "epoch": 21.24,
      "learning_rate": 4.7666181645912295e-05,
      "loss": 1.0477,
      "step": 79000
    },
    {
      "epoch": 21.38,
      "learning_rate": 4.7649262317271255e-05,
      "loss": 1.0423,
      "step": 79500
    },
    {
      "epoch": 21.51,
      "learning_rate": 4.7632342988630215e-05,
      "loss": 1.0425,
      "step": 80000
    },
    {
      "epoch": 21.65,
      "learning_rate": 4.7615423659989175e-05,
      "loss": 1.04,
      "step": 80500
    },
    {
      "epoch": 21.78,
      "learning_rate": 4.7598504331348135e-05,
      "loss": 1.0379,
      "step": 81000
    },
    {
      "epoch": 21.91,
      "learning_rate": 4.7581585002707095e-05,
      "loss": 1.0362,
      "step": 81500
    },
    {
      "epoch": 22.05,
      "learning_rate": 4.7564665674066054e-05,
      "loss": 1.0299,
      "step": 82000
    },
    {
      "epoch": 22.18,
      "learning_rate": 4.7547780184082293e-05,
      "loss": 1.0288,
      "step": 82500
    },
    {
      "epoch": 22.32,
      "learning_rate": 4.7530860855441253e-05,
      "loss": 1.0267,
      "step": 83000
    },
    {
      "epoch": 22.45,
      "learning_rate": 4.751394152680022e-05,
      "loss": 1.0233,
      "step": 83500
    },
    {
      "epoch": 22.59,
      "learning_rate": 4.749702219815918e-05,
      "loss": 1.0218,
      "step": 84000
    },
    {
      "epoch": 22.72,
      "learning_rate": 4.748010286951814e-05,
      "loss": 1.0201,
      "step": 84500
    },
    {
      "epoch": 22.86,
      "learning_rate": 4.7463251218191665e-05,
      "loss": 1.0174,
      "step": 85000
    },
    {
      "epoch": 22.99,
      "learning_rate": 4.7446331889550625e-05,
      "loss": 1.0126,
      "step": 85500
    },
    {
      "epoch": 23.12,
      "learning_rate": 4.7429412560909585e-05,
      "loss": 1.0109,
      "step": 86000
    },
    {
      "epoch": 23.26,
      "learning_rate": 4.7412493232268545e-05,
      "loss": 1.0083,
      "step": 86500
    },
    {
      "epoch": 23.39,
      "learning_rate": 4.7395573903627505e-05,
      "loss": 1.0095,
      "step": 87000
    },
    {
      "epoch": 23.53,
      "learning_rate": 4.7378654574986465e-05,
      "loss": 1.0056,
      "step": 87500
    },
    {
      "epoch": 23.66,
      "learning_rate": 4.736173524634543e-05,
      "loss": 1.0019,
      "step": 88000
    },
    {
      "epoch": 23.8,
      "learning_rate": 4.734481591770439e-05,
      "loss": 1.0015,
      "step": 88500
    },
    {
      "epoch": 23.93,
      "learning_rate": 4.732793042772063e-05,
      "loss": 0.9992,
      "step": 89000
    },
    {
      "epoch": 24.07,
      "learning_rate": 4.731101109907959e-05,
      "loss": 0.9968,
      "step": 89500
    },
    {
      "epoch": 24.2,
      "learning_rate": 4.729409177043855e-05,
      "loss": 0.9941,
      "step": 90000
    },
    {
      "epoch": 24.33,
      "learning_rate": 4.727717244179751e-05,
      "loss": 0.9916,
      "step": 90500
    },
    {
      "epoch": 24.47,
      "learning_rate": 4.7260286951813756e-05,
      "loss": 0.9906,
      "step": 91000
    },
    {
      "epoch": 24.6,
      "learning_rate": 4.7243367623172716e-05,
      "loss": 0.9892,
      "step": 91500
    },
    {
      "epoch": 24.74,
      "learning_rate": 4.7226448294531676e-05,
      "loss": 0.9857,
      "step": 92000
    },
    {
      "epoch": 24.87,
      "learning_rate": 4.7209528965890636e-05,
      "loss": 0.9847,
      "step": 92500
    },
    {
      "epoch": 25.01,
      "learning_rate": 4.719264347590688e-05,
      "loss": 0.9816,
      "step": 93000
    },
    {
      "epoch": 25.14,
      "learning_rate": 4.7175724147265835e-05,
      "loss": 0.9799,
      "step": 93500
    },
    {
      "epoch": 25.28,
      "learning_rate": 4.715883865728209e-05,
      "loss": 0.9786,
      "step": 94000
    },
    {
      "epoch": 25.41,
      "learning_rate": 4.714191932864104e-05,
      "loss": 0.9759,
      "step": 94500
    },
    {
      "epoch": 25.54,
      "learning_rate": 4.7125e-05,
      "loss": 0.9751,
      "step": 95000
    },
    {
      "epoch": 25.68,
      "learning_rate": 4.710808067135896e-05,
      "loss": 0.9754,
      "step": 95500
    },
    {
      "epoch": 25.81,
      "learning_rate": 4.709116134271792e-05,
      "loss": 0.9705,
      "step": 96000
    },
    {
      "epoch": 25.95,
      "learning_rate": 4.707424201407689e-05,
      "loss": 0.9707,
      "step": 96500
    }
  ],
  "max_steps": 1487600,
  "num_train_epochs": 400,
  "total_flos": 2.605545353563131e+19,
  "trial_name": null,
  "trial_params": null
}