{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.6156191369606003,
"eval_steps": 500,
"global_step": 21000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0029315196998123826,
"grad_norm": 0.7248865962028503,
"learning_rate": 4.9868042930033434e-05,
"loss": 1.3593,
"step": 100
},
{
"epoch": 0.005863039399624765,
"grad_norm": 0.3698488771915436,
"learning_rate": 4.972142396340391e-05,
"loss": 1.3285,
"step": 200
},
{
"epoch": 0.008794559099437148,
"grad_norm": 0.24848727881908417,
"learning_rate": 4.957480499677439e-05,
"loss": 1.324,
"step": 300
},
{
"epoch": 0.01172607879924953,
"grad_norm": 0.2648352086544037,
"learning_rate": 4.942818603014486e-05,
"loss": 1.3145,
"step": 400
},
{
"epoch": 0.014657598499061914,
"grad_norm": 0.3995356559753418,
"learning_rate": 4.928156706351534e-05,
"loss": 1.3141,
"step": 500
},
{
"epoch": 0.017589118198874296,
"grad_norm": 0.32299748063087463,
"learning_rate": 4.913494809688581e-05,
"loss": 1.3087,
"step": 600
},
{
"epoch": 0.02052063789868668,
"grad_norm": 0.33237338066101074,
"learning_rate": 4.898832913025629e-05,
"loss": 1.3071,
"step": 700
},
{
"epoch": 0.02345215759849906,
"grad_norm": 0.2716487646102905,
"learning_rate": 4.8841710163626766e-05,
"loss": 1.3048,
"step": 800
},
{
"epoch": 0.026383677298311446,
"grad_norm": 0.8654017448425293,
"learning_rate": 4.869509119699725e-05,
"loss": 1.3087,
"step": 900
},
{
"epoch": 0.02931519699812383,
"grad_norm": 0.27945375442504883,
"learning_rate": 4.854847223036773e-05,
"loss": 1.3015,
"step": 1000
},
{
"epoch": 0.03224671669793621,
"grad_norm": 0.27838173508644104,
"learning_rate": 4.84018532637382e-05,
"loss": 1.2998,
"step": 1100
},
{
"epoch": 0.03517823639774859,
"grad_norm": 0.24157749116420746,
"learning_rate": 4.8255234297108676e-05,
"loss": 1.3016,
"step": 1200
},
{
"epoch": 0.038109756097560975,
"grad_norm": 0.45793822407722473,
"learning_rate": 4.810861533047915e-05,
"loss": 1.2983,
"step": 1300
},
{
"epoch": 0.04104127579737336,
"grad_norm": 0.1312064826488495,
"learning_rate": 4.796199636384963e-05,
"loss": 1.2978,
"step": 1400
},
{
"epoch": 0.04397279549718574,
"grad_norm": 0.33062976598739624,
"learning_rate": 4.7815377397220105e-05,
"loss": 1.2972,
"step": 1500
},
{
"epoch": 0.04690431519699812,
"grad_norm": 0.29819390177726746,
"learning_rate": 4.766875843059058e-05,
"loss": 1.2945,
"step": 1600
},
{
"epoch": 0.0498358348968105,
"grad_norm": 0.3396131694316864,
"learning_rate": 4.752213946396106e-05,
"loss": 1.2921,
"step": 1700
},
{
"epoch": 0.05276735459662289,
"grad_norm": 0.3231181502342224,
"learning_rate": 4.737552049733154e-05,
"loss": 1.2932,
"step": 1800
},
{
"epoch": 0.055698874296435275,
"grad_norm": 0.4015660285949707,
"learning_rate": 4.7228901530702015e-05,
"loss": 1.2899,
"step": 1900
},
{
"epoch": 0.05863039399624766,
"grad_norm": 0.436213880777359,
"learning_rate": 4.708228256407249e-05,
"loss": 1.2906,
"step": 2000
},
{
"epoch": 0.06156191369606004,
"grad_norm": 0.3451833426952362,
"learning_rate": 4.693566359744297e-05,
"loss": 1.2884,
"step": 2100
},
{
"epoch": 0.06449343339587242,
"grad_norm": 0.41890543699264526,
"learning_rate": 4.6789044630813445e-05,
"loss": 1.2892,
"step": 2200
},
{
"epoch": 0.0674249530956848,
"grad_norm": 0.3117181062698364,
"learning_rate": 4.664242566418392e-05,
"loss": 1.2866,
"step": 2300
},
{
"epoch": 0.07035647279549719,
"grad_norm": 0.2703840136528015,
"learning_rate": 4.64958066975544e-05,
"loss": 1.2869,
"step": 2400
},
{
"epoch": 0.07328799249530957,
"grad_norm": 0.31400740146636963,
"learning_rate": 4.6349187730924874e-05,
"loss": 1.2844,
"step": 2500
},
{
"epoch": 0.07621951219512195,
"grad_norm": 0.36265355348587036,
"learning_rate": 4.620256876429535e-05,
"loss": 1.2847,
"step": 2600
},
{
"epoch": 0.07915103189493433,
"grad_norm": 0.32774218916893005,
"learning_rate": 4.605594979766583e-05,
"loss": 1.2889,
"step": 2700
},
{
"epoch": 0.08208255159474671,
"grad_norm": 0.26954421401023865,
"learning_rate": 4.590933083103631e-05,
"loss": 1.2823,
"step": 2800
},
{
"epoch": 0.0850140712945591,
"grad_norm": 0.4042912423610687,
"learning_rate": 4.5762711864406784e-05,
"loss": 1.2869,
"step": 2900
},
{
"epoch": 0.08794559099437148,
"grad_norm": 0.28043991327285767,
"learning_rate": 4.561609289777726e-05,
"loss": 1.2851,
"step": 3000
},
{
"epoch": 0.09087711069418386,
"grad_norm": 0.35115665197372437,
"learning_rate": 4.546947393114774e-05,
"loss": 1.2816,
"step": 3100
},
{
"epoch": 0.09380863039399624,
"grad_norm": 0.3325822651386261,
"learning_rate": 4.532285496451821e-05,
"loss": 1.2821,
"step": 3200
},
{
"epoch": 0.09674015009380862,
"grad_norm": 0.14176355302333832,
"learning_rate": 4.517623599788869e-05,
"loss": 1.2808,
"step": 3300
},
{
"epoch": 0.099671669793621,
"grad_norm": 0.3418448567390442,
"learning_rate": 4.502961703125916e-05,
"loss": 1.2808,
"step": 3400
},
{
"epoch": 0.1026031894934334,
"grad_norm": 0.3579727113246918,
"learning_rate": 4.488299806462964e-05,
"loss": 1.2768,
"step": 3500
},
{
"epoch": 0.10553470919324578,
"grad_norm": 0.2470945566892624,
"learning_rate": 4.473637909800012e-05,
"loss": 1.2812,
"step": 3600
},
{
"epoch": 0.10846622889305817,
"grad_norm": 0.32134631276130676,
"learning_rate": 4.45897601313706e-05,
"loss": 1.2797,
"step": 3700
},
{
"epoch": 0.11139774859287055,
"grad_norm": 0.2982274889945984,
"learning_rate": 4.444314116474108e-05,
"loss": 1.2762,
"step": 3800
},
{
"epoch": 0.11432926829268293,
"grad_norm": 0.2519618570804596,
"learning_rate": 4.429652219811155e-05,
"loss": 1.2767,
"step": 3900
},
{
"epoch": 0.11726078799249531,
"grad_norm": 0.30841368436813354,
"learning_rate": 4.4149903231482026e-05,
"loss": 1.2765,
"step": 4000
},
{
"epoch": 0.1201923076923077,
"grad_norm": 0.3413415551185608,
"learning_rate": 4.40032842648525e-05,
"loss": 1.274,
"step": 4100
},
{
"epoch": 0.12312382739212008,
"grad_norm": 0.3912579119205475,
"learning_rate": 4.385666529822298e-05,
"loss": 1.2694,
"step": 4200
},
{
"epoch": 0.12605534709193245,
"grad_norm": 0.39514127373695374,
"learning_rate": 4.3710046331593455e-05,
"loss": 1.2742,
"step": 4300
},
{
"epoch": 0.12898686679174484,
"grad_norm": 0.5046316385269165,
"learning_rate": 4.356342736496393e-05,
"loss": 1.2723,
"step": 4400
},
{
"epoch": 0.1319183864915572,
"grad_norm": 0.40433749556541443,
"learning_rate": 4.341680839833441e-05,
"loss": 1.2714,
"step": 4500
},
{
"epoch": 0.1348499061913696,
"grad_norm": 0.41701552271842957,
"learning_rate": 4.327018943170489e-05,
"loss": 1.2686,
"step": 4600
},
{
"epoch": 0.137781425891182,
"grad_norm": 0.6152161359786987,
"learning_rate": 4.3123570465075365e-05,
"loss": 1.2642,
"step": 4700
},
{
"epoch": 0.14071294559099437,
"grad_norm": 0.6579223275184631,
"learning_rate": 4.297695149844584e-05,
"loss": 1.2648,
"step": 4800
},
{
"epoch": 0.14364446529080677,
"grad_norm": 0.47039562463760376,
"learning_rate": 4.283033253181632e-05,
"loss": 1.2623,
"step": 4900
},
{
"epoch": 0.14657598499061913,
"grad_norm": 0.5053458213806152,
"learning_rate": 4.2683713565186795e-05,
"loss": 1.2581,
"step": 5000
},
{
"epoch": 0.14950750469043153,
"grad_norm": 0.6096323728561401,
"learning_rate": 4.253709459855727e-05,
"loss": 1.2526,
"step": 5100
},
{
"epoch": 0.1524390243902439,
"grad_norm": 0.6972830891609192,
"learning_rate": 4.239047563192775e-05,
"loss": 1.2541,
"step": 5200
},
{
"epoch": 0.1553705440900563,
"grad_norm": 0.27344340085983276,
"learning_rate": 4.2243856665298224e-05,
"loss": 1.2496,
"step": 5300
},
{
"epoch": 0.15830206378986866,
"grad_norm": 0.6050379276275635,
"learning_rate": 4.20972376986687e-05,
"loss": 1.243,
"step": 5400
},
{
"epoch": 0.16123358348968106,
"grad_norm": 0.4654715061187744,
"learning_rate": 4.195061873203918e-05,
"loss": 1.2398,
"step": 5500
},
{
"epoch": 0.16416510318949343,
"grad_norm": 0.8257409334182739,
"learning_rate": 4.180399976540966e-05,
"loss": 1.2377,
"step": 5600
},
{
"epoch": 0.16709662288930582,
"grad_norm": 0.74887615442276,
"learning_rate": 4.1657380798780134e-05,
"loss": 1.236,
"step": 5700
},
{
"epoch": 0.1700281425891182,
"grad_norm": 0.7653385996818542,
"learning_rate": 4.151076183215061e-05,
"loss": 1.2276,
"step": 5800
},
{
"epoch": 0.1729596622889306,
"grad_norm": 0.7907219529151917,
"learning_rate": 4.136414286552109e-05,
"loss": 1.221,
"step": 5900
},
{
"epoch": 0.17589118198874296,
"grad_norm": 0.6932777762413025,
"learning_rate": 4.121752389889156e-05,
"loss": 1.2205,
"step": 6000
},
{
"epoch": 0.17882270168855535,
"grad_norm": 0.7687219381332397,
"learning_rate": 4.107090493226204e-05,
"loss": 1.2145,
"step": 6100
},
{
"epoch": 0.18175422138836772,
"grad_norm": 0.850978434085846,
"learning_rate": 4.092428596563251e-05,
"loss": 1.2066,
"step": 6200
},
{
"epoch": 0.18468574108818012,
"grad_norm": 0.8447002172470093,
"learning_rate": 4.077766699900299e-05,
"loss": 1.2091,
"step": 6300
},
{
"epoch": 0.18761726078799248,
"grad_norm": 1.415692687034607,
"learning_rate": 4.063104803237347e-05,
"loss": 1.1981,
"step": 6400
},
{
"epoch": 0.19054878048780488,
"grad_norm": 0.9456603527069092,
"learning_rate": 4.048442906574395e-05,
"loss": 1.2041,
"step": 6500
},
{
"epoch": 0.19348030018761725,
"grad_norm": 1.8772284984588623,
"learning_rate": 4.033781009911443e-05,
"loss": 1.199,
"step": 6600
},
{
"epoch": 0.19641181988742965,
"grad_norm": 0.8293085694313049,
"learning_rate": 4.01911911324849e-05,
"loss": 1.1886,
"step": 6700
},
{
"epoch": 0.199343339587242,
"grad_norm": 1.4174566268920898,
"learning_rate": 4.0044572165855376e-05,
"loss": 1.1851,
"step": 6800
},
{
"epoch": 0.2022748592870544,
"grad_norm": 1.6964879035949707,
"learning_rate": 3.989795319922585e-05,
"loss": 1.1851,
"step": 6900
},
{
"epoch": 0.2052063789868668,
"grad_norm": 2.025838613510132,
"learning_rate": 3.975133423259633e-05,
"loss": 1.1778,
"step": 7000
},
{
"epoch": 0.20813789868667917,
"grad_norm": 1.919710397720337,
"learning_rate": 3.9604715265966805e-05,
"loss": 1.1717,
"step": 7100
},
{
"epoch": 0.21106941838649157,
"grad_norm": 1.467106580734253,
"learning_rate": 3.945809629933728e-05,
"loss": 1.1593,
"step": 7200
},
{
"epoch": 0.21400093808630394,
"grad_norm": 2.1268837451934814,
"learning_rate": 3.931147733270776e-05,
"loss": 1.162,
"step": 7300
},
{
"epoch": 0.21693245778611633,
"grad_norm": 1.1182403564453125,
"learning_rate": 3.916485836607824e-05,
"loss": 1.1487,
"step": 7400
},
{
"epoch": 0.2198639774859287,
"grad_norm": 1.3415223360061646,
"learning_rate": 3.9018239399448715e-05,
"loss": 1.1471,
"step": 7500
},
{
"epoch": 0.2227954971857411,
"grad_norm": 1.8642910718917847,
"learning_rate": 3.887162043281919e-05,
"loss": 1.1555,
"step": 7600
},
{
"epoch": 0.22572701688555347,
"grad_norm": 1.912855625152588,
"learning_rate": 3.872500146618967e-05,
"loss": 1.1328,
"step": 7700
},
{
"epoch": 0.22865853658536586,
"grad_norm": 2.582273244857788,
"learning_rate": 3.8578382499560145e-05,
"loss": 1.1394,
"step": 7800
},
{
"epoch": 0.23159005628517823,
"grad_norm": 1.6247990131378174,
"learning_rate": 3.843176353293062e-05,
"loss": 1.1413,
"step": 7900
},
{
"epoch": 0.23452157598499063,
"grad_norm": 2.7654881477355957,
"learning_rate": 3.82851445663011e-05,
"loss": 1.1219,
"step": 8000
},
{
"epoch": 0.237453095684803,
"grad_norm": 1.3285764455795288,
"learning_rate": 3.8138525599671574e-05,
"loss": 1.1267,
"step": 8100
},
{
"epoch": 0.2403846153846154,
"grad_norm": 3.288980484008789,
"learning_rate": 3.7991906633042055e-05,
"loss": 1.1151,
"step": 8200
},
{
"epoch": 0.24331613508442776,
"grad_norm": 1.586205244064331,
"learning_rate": 3.784528766641253e-05,
"loss": 1.1202,
"step": 8300
},
{
"epoch": 0.24624765478424016,
"grad_norm": 2.017575979232788,
"learning_rate": 3.769866869978301e-05,
"loss": 1.12,
"step": 8400
},
{
"epoch": 0.24917917448405252,
"grad_norm": 3.5337297916412354,
"learning_rate": 3.7552049733153484e-05,
"loss": 1.109,
"step": 8500
},
{
"epoch": 0.2521106941838649,
"grad_norm": 2.3067338466644287,
"learning_rate": 3.740543076652396e-05,
"loss": 1.1157,
"step": 8600
},
{
"epoch": 0.2550422138836773,
"grad_norm": 1.8356772661209106,
"learning_rate": 3.725881179989444e-05,
"loss": 1.1013,
"step": 8700
},
{
"epoch": 0.2579737335834897,
"grad_norm": 1.1588494777679443,
"learning_rate": 3.711219283326491e-05,
"loss": 1.0838,
"step": 8800
},
{
"epoch": 0.26090525328330205,
"grad_norm": 2.9327263832092285,
"learning_rate": 3.696557386663539e-05,
"loss": 1.0998,
"step": 8900
},
{
"epoch": 0.2638367729831144,
"grad_norm": 1.1504569053649902,
"learning_rate": 3.681895490000586e-05,
"loss": 1.1067,
"step": 9000
},
{
"epoch": 0.26676829268292684,
"grad_norm": 1.546099066734314,
"learning_rate": 3.667233593337634e-05,
"loss": 1.0985,
"step": 9100
},
{
"epoch": 0.2696998123827392,
"grad_norm": 2.004364252090454,
"learning_rate": 3.652571696674682e-05,
"loss": 1.0906,
"step": 9200
},
{
"epoch": 0.2726313320825516,
"grad_norm": 2.3435940742492676,
"learning_rate": 3.63790980001173e-05,
"loss": 1.0815,
"step": 9300
},
{
"epoch": 0.275562851782364,
"grad_norm": 1.5270298719406128,
"learning_rate": 3.623247903348778e-05,
"loss": 1.08,
"step": 9400
},
{
"epoch": 0.2784943714821764,
"grad_norm": 4.2500386238098145,
"learning_rate": 3.608586006685825e-05,
"loss": 1.0857,
"step": 9500
},
{
"epoch": 0.28142589118198874,
"grad_norm": 1.3212778568267822,
"learning_rate": 3.5939241100228726e-05,
"loss": 1.0717,
"step": 9600
},
{
"epoch": 0.2843574108818011,
"grad_norm": 3.6155266761779785,
"learning_rate": 3.57926221335992e-05,
"loss": 1.0798,
"step": 9700
},
{
"epoch": 0.28728893058161353,
"grad_norm": 3.934211492538452,
"learning_rate": 3.564600316696968e-05,
"loss": 1.0777,
"step": 9800
},
{
"epoch": 0.2902204502814259,
"grad_norm": 1.3743681907653809,
"learning_rate": 3.5499384200340155e-05,
"loss": 1.0776,
"step": 9900
},
{
"epoch": 0.29315196998123827,
"grad_norm": 1.2762603759765625,
"learning_rate": 3.5352765233710636e-05,
"loss": 1.0653,
"step": 10000
},
{
"epoch": 0.29608348968105064,
"grad_norm": 1.7702666521072388,
"learning_rate": 3.520614626708112e-05,
"loss": 1.0623,
"step": 10100
},
{
"epoch": 0.29901500938086306,
"grad_norm": 1.9537125825881958,
"learning_rate": 3.505952730045159e-05,
"loss": 1.077,
"step": 10200
},
{
"epoch": 0.30194652908067543,
"grad_norm": 1.6890079975128174,
"learning_rate": 3.4912908333822065e-05,
"loss": 1.0689,
"step": 10300
},
{
"epoch": 0.3048780487804878,
"grad_norm": 2.9840946197509766,
"learning_rate": 3.476628936719254e-05,
"loss": 1.0683,
"step": 10400
},
{
"epoch": 0.30780956848030017,
"grad_norm": 0.620637834072113,
"learning_rate": 3.461967040056302e-05,
"loss": 1.0599,
"step": 10500
},
{
"epoch": 0.3107410881801126,
"grad_norm": 2.518418073654175,
"learning_rate": 3.4473051433933494e-05,
"loss": 1.0511,
"step": 10600
},
{
"epoch": 0.31367260787992496,
"grad_norm": 4.154684543609619,
"learning_rate": 3.432643246730397e-05,
"loss": 1.0639,
"step": 10700
},
{
"epoch": 0.3166041275797373,
"grad_norm": 2.329303741455078,
"learning_rate": 3.417981350067445e-05,
"loss": 1.0606,
"step": 10800
},
{
"epoch": 0.3195356472795497,
"grad_norm": 1.5261120796203613,
"learning_rate": 3.4033194534044924e-05,
"loss": 1.0651,
"step": 10900
},
{
"epoch": 0.3224671669793621,
"grad_norm": 1.4638831615447998,
"learning_rate": 3.3886575567415405e-05,
"loss": 1.0508,
"step": 11000
},
{
"epoch": 0.3253986866791745,
"grad_norm": 1.0534781217575073,
"learning_rate": 3.373995660078588e-05,
"loss": 1.0593,
"step": 11100
},
{
"epoch": 0.32833020637898686,
"grad_norm": 1.870732307434082,
"learning_rate": 3.359333763415636e-05,
"loss": 1.0464,
"step": 11200
},
{
"epoch": 0.3312617260787992,
"grad_norm": 1.8039565086364746,
"learning_rate": 3.3446718667526834e-05,
"loss": 1.0496,
"step": 11300
},
{
"epoch": 0.33419324577861165,
"grad_norm": 1.551080584526062,
"learning_rate": 3.330009970089731e-05,
"loss": 1.03,
"step": 11400
},
{
"epoch": 0.337124765478424,
"grad_norm": 1.5708621740341187,
"learning_rate": 3.315348073426779e-05,
"loss": 1.0538,
"step": 11500
},
{
"epoch": 0.3400562851782364,
"grad_norm": 2.1092281341552734,
"learning_rate": 3.300686176763826e-05,
"loss": 1.0436,
"step": 11600
},
{
"epoch": 0.3429878048780488,
"grad_norm": 1.5008190870285034,
"learning_rate": 3.286024280100874e-05,
"loss": 1.0379,
"step": 11700
},
{
"epoch": 0.3459193245778612,
"grad_norm": 0.9910880923271179,
"learning_rate": 3.271362383437922e-05,
"loss": 1.0252,
"step": 11800
},
{
"epoch": 0.34885084427767354,
"grad_norm": 0.7434718012809753,
"learning_rate": 3.256700486774969e-05,
"loss": 1.0419,
"step": 11900
},
{
"epoch": 0.3517823639774859,
"grad_norm": 5.62740421295166,
"learning_rate": 3.242038590112017e-05,
"loss": 1.0448,
"step": 12000
},
{
"epoch": 0.35471388367729834,
"grad_norm": 0.7728565335273743,
"learning_rate": 3.227376693449065e-05,
"loss": 1.0381,
"step": 12100
},
{
"epoch": 0.3576454033771107,
"grad_norm": 0.6013042330741882,
"learning_rate": 3.212714796786113e-05,
"loss": 1.0316,
"step": 12200
},
{
"epoch": 0.3605769230769231,
"grad_norm": 1.558634638786316,
"learning_rate": 3.19805290012316e-05,
"loss": 1.0423,
"step": 12300
},
{
"epoch": 0.36350844277673544,
"grad_norm": 2.510350465774536,
"learning_rate": 3.1833910034602076e-05,
"loss": 1.0261,
"step": 12400
},
{
"epoch": 0.36643996247654786,
"grad_norm": 1.5596210956573486,
"learning_rate": 3.168729106797255e-05,
"loss": 1.0289,
"step": 12500
},
{
"epoch": 0.36937148217636023,
"grad_norm": 1.2188498973846436,
"learning_rate": 3.154067210134303e-05,
"loss": 1.0307,
"step": 12600
},
{
"epoch": 0.3723030018761726,
"grad_norm": 1.0569968223571777,
"learning_rate": 3.1394053134713505e-05,
"loss": 1.0114,
"step": 12700
},
{
"epoch": 0.37523452157598497,
"grad_norm": 1.9288491010665894,
"learning_rate": 3.1247434168083986e-05,
"loss": 1.0382,
"step": 12800
},
{
"epoch": 0.3781660412757974,
"grad_norm": 2.289350748062134,
"learning_rate": 3.110081520145447e-05,
"loss": 1.0367,
"step": 12900
},
{
"epoch": 0.38109756097560976,
"grad_norm": 1.0306146144866943,
"learning_rate": 3.095419623482494e-05,
"loss": 1.0417,
"step": 13000
},
{
"epoch": 0.38402908067542213,
"grad_norm": 1.7335386276245117,
"learning_rate": 3.0807577268195415e-05,
"loss": 1.0433,
"step": 13100
},
{
"epoch": 0.3869606003752345,
"grad_norm": 1.5590994358062744,
"learning_rate": 3.066095830156589e-05,
"loss": 1.0313,
"step": 13200
},
{
"epoch": 0.3898921200750469,
"grad_norm": 0.944471001625061,
"learning_rate": 3.051433933493637e-05,
"loss": 1.0338,
"step": 13300
},
{
"epoch": 0.3928236397748593,
"grad_norm": 0.9511914849281311,
"learning_rate": 3.0367720368306844e-05,
"loss": 1.0313,
"step": 13400
},
{
"epoch": 0.39575515947467166,
"grad_norm": 0.8080090880393982,
"learning_rate": 3.0221101401677322e-05,
"loss": 1.0286,
"step": 13500
},
{
"epoch": 0.398686679174484,
"grad_norm": 1.507084846496582,
"learning_rate": 3.0074482435047803e-05,
"loss": 1.0267,
"step": 13600
},
{
"epoch": 0.40161819887429645,
"grad_norm": 0.6863479614257812,
"learning_rate": 2.9927863468418277e-05,
"loss": 1.0271,
"step": 13700
},
{
"epoch": 0.4045497185741088,
"grad_norm": 2.595979690551758,
"learning_rate": 2.978124450178875e-05,
"loss": 1.0119,
"step": 13800
},
{
"epoch": 0.4074812382739212,
"grad_norm": 0.9219207763671875,
"learning_rate": 2.963462553515923e-05,
"loss": 1.015,
"step": 13900
},
{
"epoch": 0.4104127579737336,
"grad_norm": 3.082329750061035,
"learning_rate": 2.948800656852971e-05,
"loss": 1.02,
"step": 14000
},
{
"epoch": 0.413344277673546,
"grad_norm": 2.0359930992126465,
"learning_rate": 2.9341387601900184e-05,
"loss": 1.0227,
"step": 14100
},
{
"epoch": 0.41627579737335835,
"grad_norm": 1.199521541595459,
"learning_rate": 2.9194768635270658e-05,
"loss": 1.0234,
"step": 14200
},
{
"epoch": 0.4192073170731707,
"grad_norm": 4.709115982055664,
"learning_rate": 2.904814966864114e-05,
"loss": 1.0262,
"step": 14300
},
{
"epoch": 0.42213883677298314,
"grad_norm": 2.031787872314453,
"learning_rate": 2.8901530702011613e-05,
"loss": 1.0129,
"step": 14400
},
{
"epoch": 0.4250703564727955,
"grad_norm": 1.0663975477218628,
"learning_rate": 2.875491173538209e-05,
"loss": 1.0231,
"step": 14500
},
{
"epoch": 0.4280018761726079,
"grad_norm": 1.1190619468688965,
"learning_rate": 2.8608292768752564e-05,
"loss": 1.0203,
"step": 14600
},
{
"epoch": 0.43093339587242024,
"grad_norm": 1.1025962829589844,
"learning_rate": 2.8461673802123045e-05,
"loss": 1.0098,
"step": 14700
},
{
"epoch": 0.43386491557223267,
"grad_norm": 2.02659273147583,
"learning_rate": 2.831505483549352e-05,
"loss": 1.0146,
"step": 14800
},
{
"epoch": 0.43679643527204504,
"grad_norm": 1.0219608545303345,
"learning_rate": 2.8168435868863997e-05,
"loss": 1.0233,
"step": 14900
},
{
"epoch": 0.4397279549718574,
"grad_norm": 1.0353147983551025,
"learning_rate": 2.8021816902234478e-05,
"loss": 1.0094,
"step": 15000
},
{
"epoch": 0.4426594746716698,
"grad_norm": 1.9201918840408325,
"learning_rate": 2.7875197935604952e-05,
"loss": 1.0042,
"step": 15100
},
{
"epoch": 0.4455909943714822,
"grad_norm": 0.9451847672462463,
"learning_rate": 2.7728578968975426e-05,
"loss": 1.0179,
"step": 15200
},
{
"epoch": 0.44852251407129456,
"grad_norm": 2.44089937210083,
"learning_rate": 2.7581960002345904e-05,
"loss": 1.0182,
"step": 15300
},
{
"epoch": 0.45145403377110693,
"grad_norm": 1.49729585647583,
"learning_rate": 2.7435341035716384e-05,
"loss": 1.019,
"step": 15400
},
{
"epoch": 0.4543855534709193,
"grad_norm": 3.6466991901397705,
"learning_rate": 2.728872206908686e-05,
"loss": 1.0255,
"step": 15500
},
{
"epoch": 0.4573170731707317,
"grad_norm": 1.428072452545166,
"learning_rate": 2.7142103102457333e-05,
"loss": 1.0137,
"step": 15600
},
{
"epoch": 0.4602485928705441,
"grad_norm": 1.0874691009521484,
"learning_rate": 2.6995484135827814e-05,
"loss": 0.9944,
"step": 15700
},
{
"epoch": 0.46318011257035646,
"grad_norm": 0.8218618631362915,
"learning_rate": 2.684886516919829e-05,
"loss": 0.9995,
"step": 15800
},
{
"epoch": 0.46611163227016883,
"grad_norm": 1.0309679508209229,
"learning_rate": 2.6702246202568765e-05,
"loss": 1.0211,
"step": 15900
},
{
"epoch": 0.46904315196998125,
"grad_norm": 1.7576292753219604,
"learning_rate": 2.655562723593924e-05,
"loss": 1.016,
"step": 16000
},
{
"epoch": 0.4719746716697936,
"grad_norm": 1.233428716659546,
"learning_rate": 2.640900826930972e-05,
"loss": 1.0005,
"step": 16100
},
{
"epoch": 0.474906191369606,
"grad_norm": 1.013920545578003,
"learning_rate": 2.6262389302680194e-05,
"loss": 0.9947,
"step": 16200
},
{
"epoch": 0.4778377110694184,
"grad_norm": 0.98600834608078,
"learning_rate": 2.6115770336050672e-05,
"loss": 1.002,
"step": 16300
},
{
"epoch": 0.4807692307692308,
"grad_norm": 0.656637966632843,
"learning_rate": 2.5969151369421153e-05,
"loss": 1.009,
"step": 16400
},
{
"epoch": 0.48370075046904315,
"grad_norm": 1.2900489568710327,
"learning_rate": 2.5822532402791627e-05,
"loss": 1.0082,
"step": 16500
},
{
"epoch": 0.4866322701688555,
"grad_norm": 1.068027377128601,
"learning_rate": 2.56759134361621e-05,
"loss": 1.0088,
"step": 16600
},
{
"epoch": 0.48956378986866794,
"grad_norm": 0.6388201117515564,
"learning_rate": 2.552929446953258e-05,
"loss": 0.9925,
"step": 16700
},
{
"epoch": 0.4924953095684803,
"grad_norm": 0.9185531139373779,
"learning_rate": 2.538267550290306e-05,
"loss": 1.019,
"step": 16800
},
{
"epoch": 0.4954268292682927,
"grad_norm": 1.1571044921875,
"learning_rate": 2.5236056536273534e-05,
"loss": 1.0118,
"step": 16900
},
{
"epoch": 0.49835834896810505,
"grad_norm": 1.3585349321365356,
"learning_rate": 2.5089437569644008e-05,
"loss": 1.0092,
"step": 17000
},
{
"epoch": 0.5012898686679175,
"grad_norm": 1.0178191661834717,
"learning_rate": 2.4942818603014485e-05,
"loss": 0.9954,
"step": 17100
},
{
"epoch": 0.5042213883677298,
"grad_norm": 1.0959707498550415,
"learning_rate": 2.4796199636384966e-05,
"loss": 1.0117,
"step": 17200
},
{
"epoch": 0.5071529080675422,
"grad_norm": 1.6161094903945923,
"learning_rate": 2.464958066975544e-05,
"loss": 0.9874,
"step": 17300
},
{
"epoch": 0.5100844277673546,
"grad_norm": 1.3352782726287842,
"learning_rate": 2.4502961703125918e-05,
"loss": 1.0053,
"step": 17400
},
{
"epoch": 0.5130159474671669,
"grad_norm": 0.9680103659629822,
"learning_rate": 2.4356342736496392e-05,
"loss": 0.9874,
"step": 17500
},
{
"epoch": 0.5159474671669794,
"grad_norm": 1.894599437713623,
"learning_rate": 2.420972376986687e-05,
"loss": 0.9793,
"step": 17600
},
{
"epoch": 0.5188789868667918,
"grad_norm": 2.3807919025421143,
"learning_rate": 2.406310480323735e-05,
"loss": 1.001,
"step": 17700
},
{
"epoch": 0.5218105065666041,
"grad_norm": 1.3010458946228027,
"learning_rate": 2.3916485836607824e-05,
"loss": 0.9976,
"step": 17800
},
{
"epoch": 0.5247420262664165,
"grad_norm": 0.8739995360374451,
"learning_rate": 2.3769866869978302e-05,
"loss": 0.9832,
"step": 17900
},
{
"epoch": 0.5276735459662288,
"grad_norm": 1.0226731300354004,
"learning_rate": 2.3623247903348776e-05,
"loss": 0.9889,
"step": 18000
},
{
"epoch": 0.5306050656660413,
"grad_norm": 0.9787300229072571,
"learning_rate": 2.3476628936719257e-05,
"loss": 1.0001,
"step": 18100
},
{
"epoch": 0.5335365853658537,
"grad_norm": 0.8458353877067566,
"learning_rate": 2.333000997008973e-05,
"loss": 0.9963,
"step": 18200
},
{
"epoch": 0.536468105065666,
"grad_norm": 2.051384210586548,
"learning_rate": 2.318339100346021e-05,
"loss": 0.9974,
"step": 18300
},
{
"epoch": 0.5393996247654784,
"grad_norm": 1.1882303953170776,
"learning_rate": 2.3036772036830686e-05,
"loss": 0.9999,
"step": 18400
},
{
"epoch": 0.5423311444652908,
"grad_norm": 1.2132831811904907,
"learning_rate": 2.289015307020116e-05,
"loss": 0.9826,
"step": 18500
},
{
"epoch": 0.5452626641651032,
"grad_norm": 0.7028972506523132,
"learning_rate": 2.274353410357164e-05,
"loss": 0.9785,
"step": 18600
},
{
"epoch": 0.5481941838649156,
"grad_norm": 0.5096247792243958,
"learning_rate": 2.2596915136942115e-05,
"loss": 0.9985,
"step": 18700
},
{
"epoch": 0.551125703564728,
"grad_norm": 0.7240370512008667,
"learning_rate": 2.2450296170312593e-05,
"loss": 0.9938,
"step": 18800
},
{
"epoch": 0.5540572232645403,
"grad_norm": 1.274845004081726,
"learning_rate": 2.2303677203683067e-05,
"loss": 0.9979,
"step": 18900
},
{
"epoch": 0.5569887429643527,
"grad_norm": 1.0941996574401855,
"learning_rate": 2.2157058237053548e-05,
"loss": 1.0095,
"step": 19000
},
{
"epoch": 0.5599202626641651,
"grad_norm": 0.6764891147613525,
"learning_rate": 2.2010439270424025e-05,
"loss": 0.9839,
"step": 19100
},
{
"epoch": 0.5628517823639775,
"grad_norm": 1.7382996082305908,
"learning_rate": 2.18638203037945e-05,
"loss": 0.9846,
"step": 19200
},
{
"epoch": 0.5657833020637899,
"grad_norm": 0.9387032389640808,
"learning_rate": 2.1717201337164977e-05,
"loss": 0.9895,
"step": 19300
},
{
"epoch": 0.5687148217636022,
"grad_norm": 1.1649117469787598,
"learning_rate": 2.157058237053545e-05,
"loss": 0.9927,
"step": 19400
},
{
"epoch": 0.5716463414634146,
"grad_norm": 0.8027826547622681,
"learning_rate": 2.1423963403905932e-05,
"loss": 0.9856,
"step": 19500
},
{
"epoch": 0.5745778611632271,
"grad_norm": 1.4004380702972412,
"learning_rate": 2.1277344437276406e-05,
"loss": 0.9868,
"step": 19600
},
{
"epoch": 0.5775093808630394,
"grad_norm": 0.5555911064147949,
"learning_rate": 2.1130725470646884e-05,
"loss": 0.9996,
"step": 19700
},
{
"epoch": 0.5804409005628518,
"grad_norm": 1.0119162797927856,
"learning_rate": 2.098410650401736e-05,
"loss": 0.9783,
"step": 19800
},
{
"epoch": 0.5833724202626641,
"grad_norm": 1.3984887599945068,
"learning_rate": 2.083748753738784e-05,
"loss": 0.9743,
"step": 19900
},
{
"epoch": 0.5863039399624765,
"grad_norm": 2.691563844680786,
"learning_rate": 2.0690868570758316e-05,
"loss": 1.0043,
"step": 20000
},
{
"epoch": 0.589235459662289,
"grad_norm": 0.9225094318389893,
"learning_rate": 2.054424960412879e-05,
"loss": 0.987,
"step": 20100
},
{
"epoch": 0.5921669793621013,
"grad_norm": 1.3359028100967407,
"learning_rate": 2.0397630637499268e-05,
"loss": 0.9945,
"step": 20200
},
{
"epoch": 0.5950984990619137,
"grad_norm": 1.1923692226409912,
"learning_rate": 2.0251011670869742e-05,
"loss": 0.9865,
"step": 20300
},
{
"epoch": 0.5980300187617261,
"grad_norm": 4.047428607940674,
"learning_rate": 2.0104392704240223e-05,
"loss": 0.9835,
"step": 20400
},
{
"epoch": 0.6009615384615384,
"grad_norm": 1.8052351474761963,
"learning_rate": 1.99577737376107e-05,
"loss": 0.9852,
"step": 20500
},
{
"epoch": 0.6038930581613509,
"grad_norm": 1.7130268812179565,
"learning_rate": 1.9811154770981174e-05,
"loss": 0.9737,
"step": 20600
},
{
"epoch": 0.6068245778611632,
"grad_norm": 0.9257317185401917,
"learning_rate": 1.9664535804351652e-05,
"loss": 0.9882,
"step": 20700
},
{
"epoch": 0.6097560975609756,
"grad_norm": 1.351476788520813,
"learning_rate": 1.951791683772213e-05,
"loss": 0.9682,
"step": 20800
},
{
"epoch": 0.612687617260788,
"grad_norm": 1.4445241689682007,
"learning_rate": 1.9371297871092607e-05,
"loss": 0.9906,
"step": 20900
},
{
"epoch": 0.6156191369606003,
"grad_norm": 1.3420469760894775,
"learning_rate": 1.922467890446308e-05,
"loss": 0.9661,
"step": 21000
}
],
"logging_steps": 100,
"max_steps": 34112,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.75756302876672e+17,
"train_batch_size": 36,
"trial_name": null,
"trial_params": null
}