{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3224671669793621,
  "eval_steps": 500,
  "global_step": 11000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0029315196998123826, "grad_norm": 0.7248865962028503, "learning_rate": 4.9868042930033434e-05, "loss": 1.3593, "step": 100 },
    { "epoch": 0.005863039399624765, "grad_norm": 0.3698488771915436, "learning_rate": 4.972142396340391e-05, "loss": 1.3285, "step": 200 },
    { "epoch": 0.008794559099437148, "grad_norm": 0.24848727881908417, "learning_rate": 4.957480499677439e-05, "loss": 1.324, "step": 300 },
    { "epoch": 0.01172607879924953, "grad_norm": 0.2648352086544037, "learning_rate": 4.942818603014486e-05, "loss": 1.3145, "step": 400 },
    { "epoch": 0.014657598499061914, "grad_norm": 0.3995356559753418, "learning_rate": 4.928156706351534e-05, "loss": 1.3141, "step": 500 },
    { "epoch": 0.017589118198874296, "grad_norm": 0.32299748063087463, "learning_rate": 4.913494809688581e-05, "loss": 1.3087, "step": 600 },
    { "epoch": 0.02052063789868668, "grad_norm": 0.33237338066101074, "learning_rate": 4.898832913025629e-05, "loss": 1.3071, "step": 700 },
    { "epoch": 0.02345215759849906, "grad_norm": 0.2716487646102905, "learning_rate": 4.8841710163626766e-05, "loss": 1.3048, "step": 800 },
    { "epoch": 0.026383677298311446, "grad_norm": 0.8654017448425293, "learning_rate": 4.869509119699725e-05, "loss": 1.3087, "step": 900 },
    { "epoch": 0.02931519699812383, "grad_norm": 0.27945375442504883, "learning_rate": 4.854847223036773e-05, "loss": 1.3015, "step": 1000 },
    { "epoch": 0.03224671669793621, "grad_norm": 0.27838173508644104, "learning_rate": 4.84018532637382e-05, "loss": 1.2998, "step": 1100 },
    { "epoch": 0.03517823639774859, "grad_norm": 0.24157749116420746, "learning_rate": 4.8255234297108676e-05, "loss": 1.3016, "step": 1200 },
    { "epoch": 0.038109756097560975, "grad_norm": 0.45793822407722473, "learning_rate": 4.810861533047915e-05, "loss": 1.2983, "step": 1300 },
    { "epoch": 0.04104127579737336, "grad_norm": 0.1312064826488495, "learning_rate": 4.796199636384963e-05, "loss": 1.2978, "step": 1400 },
    { "epoch": 0.04397279549718574, "grad_norm": 0.33062976598739624, "learning_rate": 4.7815377397220105e-05, "loss": 1.2972, "step": 1500 },
    { "epoch": 0.04690431519699812, "grad_norm": 0.29819390177726746, "learning_rate": 4.766875843059058e-05, "loss": 1.2945, "step": 1600 },
    { "epoch": 0.0498358348968105, "grad_norm": 0.3396131694316864, "learning_rate": 4.752213946396106e-05, "loss": 1.2921, "step": 1700 },
    { "epoch": 0.05276735459662289, "grad_norm": 0.3231181502342224, "learning_rate": 4.737552049733154e-05, "loss": 1.2932, "step": 1800 },
    { "epoch": 0.055698874296435275, "grad_norm": 0.4015660285949707, "learning_rate": 4.7228901530702015e-05, "loss": 1.2899, "step": 1900 },
    { "epoch": 0.05863039399624766, "grad_norm": 0.436213880777359, "learning_rate": 4.708228256407249e-05, "loss": 1.2906, "step": 2000 },
    { "epoch": 0.06156191369606004, "grad_norm": 0.3451833426952362, "learning_rate": 4.693566359744297e-05, "loss": 1.2884, "step": 2100 },
    { "epoch": 0.06449343339587242, "grad_norm": 0.41890543699264526, "learning_rate": 4.6789044630813445e-05, "loss": 1.2892, "step": 2200 },
    { "epoch": 0.0674249530956848, "grad_norm": 0.3117181062698364, "learning_rate": 4.664242566418392e-05, "loss": 1.2866, "step": 2300 },
    { "epoch": 0.07035647279549719, "grad_norm": 0.2703840136528015, "learning_rate": 4.64958066975544e-05, "loss": 1.2869, "step": 2400 },
    { "epoch": 0.07328799249530957, "grad_norm": 0.31400740146636963, "learning_rate": 4.6349187730924874e-05, "loss": 1.2844, "step": 2500 },
    { "epoch": 0.07621951219512195, "grad_norm": 0.36265355348587036, "learning_rate": 4.620256876429535e-05, "loss": 1.2847, "step": 2600 },
    { "epoch": 0.07915103189493433, "grad_norm": 0.32774218916893005, "learning_rate": 4.605594979766583e-05, "loss": 1.2889, "step": 2700 },
    { "epoch": 0.08208255159474671, "grad_norm": 0.26954421401023865, "learning_rate": 4.590933083103631e-05, "loss": 1.2823, "step": 2800 },
    { "epoch": 0.0850140712945591, "grad_norm": 0.4042912423610687, "learning_rate": 4.5762711864406784e-05, "loss": 1.2869, "step": 2900 },
    { "epoch": 0.08794559099437148, "grad_norm": 0.28043991327285767, "learning_rate": 4.561609289777726e-05, "loss": 1.2851, "step": 3000 },
    { "epoch": 0.09087711069418386, "grad_norm": 0.35115665197372437, "learning_rate": 4.546947393114774e-05, "loss": 1.2816, "step": 3100 },
    { "epoch": 0.09380863039399624, "grad_norm": 0.3325822651386261, "learning_rate": 4.532285496451821e-05, "loss": 1.2821, "step": 3200 },
    { "epoch": 0.09674015009380862, "grad_norm": 0.14176355302333832, "learning_rate": 4.517623599788869e-05, "loss": 1.2808, "step": 3300 },
    { "epoch": 0.099671669793621, "grad_norm": 0.3418448567390442, "learning_rate": 4.502961703125916e-05, "loss": 1.2808, "step": 3400 },
    { "epoch": 0.1026031894934334, "grad_norm": 0.3579727113246918, "learning_rate": 4.488299806462964e-05, "loss": 1.2768, "step": 3500 },
    { "epoch": 0.10553470919324578, "grad_norm": 0.2470945566892624, "learning_rate": 4.473637909800012e-05, "loss": 1.2812, "step": 3600 },
    { "epoch": 0.10846622889305817, "grad_norm": 0.32134631276130676, "learning_rate": 4.45897601313706e-05, "loss": 1.2797, "step": 3700 },
    { "epoch": 0.11139774859287055, "grad_norm": 0.2982274889945984, "learning_rate": 4.444314116474108e-05, "loss": 1.2762, "step": 3800 },
    { "epoch": 0.11432926829268293, "grad_norm": 0.2519618570804596, "learning_rate": 4.429652219811155e-05, "loss": 1.2767, "step": 3900 },
    { "epoch": 0.11726078799249531, "grad_norm": 0.30841368436813354, "learning_rate": 4.4149903231482026e-05, "loss": 1.2765, "step": 4000 },
    { "epoch": 0.1201923076923077, "grad_norm": 0.3413415551185608, "learning_rate": 4.40032842648525e-05, "loss": 1.274, "step": 4100 },
    { "epoch": 0.12312382739212008, "grad_norm": 0.3912579119205475, "learning_rate": 4.385666529822298e-05, "loss": 1.2694, "step": 4200 },
    { "epoch": 0.12605534709193245, "grad_norm": 0.39514127373695374, "learning_rate": 4.3710046331593455e-05, "loss": 1.2742, "step": 4300 },
    { "epoch": 0.12898686679174484, "grad_norm": 0.5046316385269165, "learning_rate": 4.356342736496393e-05, "loss": 1.2723, "step": 4400 },
    { "epoch": 0.1319183864915572, "grad_norm": 0.40433749556541443, "learning_rate": 4.341680839833441e-05, "loss": 1.2714, "step": 4500 },
    { "epoch": 0.1348499061913696, "grad_norm": 0.41701552271842957, "learning_rate": 4.327018943170489e-05, "loss": 1.2686, "step": 4600 },
    { "epoch": 0.137781425891182, "grad_norm": 0.6152161359786987, "learning_rate": 4.3123570465075365e-05, "loss": 1.2642, "step": 4700 },
    { "epoch": 0.14071294559099437, "grad_norm": 0.6579223275184631, "learning_rate": 4.297695149844584e-05, "loss": 1.2648, "step": 4800 },
    { "epoch": 0.14364446529080677, "grad_norm": 0.47039562463760376, "learning_rate": 4.283033253181632e-05, "loss": 1.2623, "step": 4900 },
    { "epoch": 0.14657598499061913, "grad_norm": 0.5053458213806152, "learning_rate": 4.2683713565186795e-05, "loss": 1.2581, "step": 5000 },
    { "epoch": 0.14950750469043153, "grad_norm": 0.6096323728561401, "learning_rate": 4.253709459855727e-05, "loss": 1.2526, "step": 5100 },
    { "epoch": 0.1524390243902439, "grad_norm": 0.6972830891609192, "learning_rate": 4.239047563192775e-05, "loss": 1.2541, "step": 5200 },
    { "epoch": 0.1553705440900563, "grad_norm": 0.27344340085983276, "learning_rate": 4.2243856665298224e-05, "loss": 1.2496, "step": 5300 },
    { "epoch": 0.15830206378986866, "grad_norm": 0.6050379276275635, "learning_rate": 4.20972376986687e-05, "loss": 1.243, "step": 5400 },
    { "epoch": 0.16123358348968106, "grad_norm": 0.4654715061187744, "learning_rate": 4.195061873203918e-05, "loss": 1.2398, "step": 5500 },
    { "epoch": 0.16416510318949343, "grad_norm": 0.8257409334182739, "learning_rate": 4.180399976540966e-05, "loss": 1.2377, "step": 5600 },
    { "epoch": 0.16709662288930582, "grad_norm": 0.74887615442276, "learning_rate": 4.1657380798780134e-05, "loss": 1.236, "step": 5700 },
    { "epoch": 0.1700281425891182, "grad_norm": 0.7653385996818542, "learning_rate": 4.151076183215061e-05, "loss": 1.2276, "step": 5800 },
    { "epoch": 0.1729596622889306, "grad_norm": 0.7907219529151917, "learning_rate": 4.136414286552109e-05, "loss": 1.221, "step": 5900 },
    { "epoch": 0.17589118198874296, "grad_norm": 0.6932777762413025, "learning_rate": 4.121752389889156e-05, "loss": 1.2205, "step": 6000 },
    { "epoch": 0.17882270168855535, "grad_norm": 0.7687219381332397, "learning_rate": 4.107090493226204e-05, "loss": 1.2145, "step": 6100 },
    { "epoch": 0.18175422138836772, "grad_norm": 0.850978434085846, "learning_rate": 4.092428596563251e-05, "loss": 1.2066, "step": 6200 },
    { "epoch": 0.18468574108818012, "grad_norm": 0.8447002172470093, "learning_rate": 4.077766699900299e-05, "loss": 1.2091, "step": 6300 },
    { "epoch": 0.18761726078799248, "grad_norm": 1.415692687034607, "learning_rate": 4.063104803237347e-05, "loss": 1.1981, "step": 6400 },
    { "epoch": 0.19054878048780488, "grad_norm": 0.9456603527069092, "learning_rate": 4.048442906574395e-05, "loss": 1.2041, "step": 6500 },
    { "epoch": 0.19348030018761725, "grad_norm": 1.8772284984588623, "learning_rate": 4.033781009911443e-05, "loss": 1.199, "step": 6600 },
    { "epoch": 0.19641181988742965, "grad_norm": 0.8293085694313049, "learning_rate": 4.01911911324849e-05, "loss": 1.1886, "step": 6700 },
    { "epoch": 0.199343339587242, "grad_norm": 1.4174566268920898, "learning_rate": 4.0044572165855376e-05, "loss": 1.1851, "step": 6800 },
    { "epoch": 0.2022748592870544, "grad_norm": 1.6964879035949707, "learning_rate": 3.989795319922585e-05, "loss": 1.1851, "step": 6900 },
    { "epoch": 0.2052063789868668, "grad_norm": 2.025838613510132, "learning_rate": 3.975133423259633e-05, "loss": 1.1778, "step": 7000 },
    { "epoch": 0.20813789868667917, "grad_norm": 1.919710397720337, "learning_rate": 3.9604715265966805e-05, "loss": 1.1717, "step": 7100 },
    { "epoch": 0.21106941838649157, "grad_norm": 1.467106580734253, "learning_rate": 3.945809629933728e-05, "loss": 1.1593, "step": 7200 },
    { "epoch": 0.21400093808630394, "grad_norm": 2.1268837451934814, "learning_rate": 3.931147733270776e-05, "loss": 1.162, "step": 7300 },
    { "epoch": 0.21693245778611633, "grad_norm": 1.1182403564453125, "learning_rate": 3.916485836607824e-05, "loss": 1.1487, "step": 7400 },
    { "epoch": 0.2198639774859287, "grad_norm": 1.3415223360061646, "learning_rate": 3.9018239399448715e-05, "loss": 1.1471, "step": 7500 },
    { "epoch": 0.2227954971857411, "grad_norm": 1.8642910718917847, "learning_rate": 3.887162043281919e-05, "loss": 1.1555, "step": 7600 },
    { "epoch": 0.22572701688555347, "grad_norm": 1.912855625152588, "learning_rate": 3.872500146618967e-05, "loss": 1.1328, "step": 7700 },
    { "epoch": 0.22865853658536586, "grad_norm": 2.582273244857788, "learning_rate": 3.8578382499560145e-05, "loss": 1.1394, "step": 7800 },
    { "epoch": 0.23159005628517823, "grad_norm": 1.6247990131378174, "learning_rate": 3.843176353293062e-05, "loss": 1.1413, "step": 7900 },
    { "epoch": 0.23452157598499063, "grad_norm": 2.7654881477355957, "learning_rate": 3.82851445663011e-05, "loss": 1.1219, "step": 8000 },
    { "epoch": 0.237453095684803, "grad_norm": 1.3285764455795288, "learning_rate": 3.8138525599671574e-05, "loss": 1.1267, "step": 8100 },
    { "epoch": 0.2403846153846154, "grad_norm": 3.288980484008789, "learning_rate": 3.7991906633042055e-05, "loss": 1.1151, "step": 8200 },
    { "epoch": 0.24331613508442776, "grad_norm": 1.586205244064331, "learning_rate": 3.784528766641253e-05, "loss": 1.1202, "step": 8300 },
    { "epoch": 0.24624765478424016, "grad_norm": 2.017575979232788, "learning_rate": 3.769866869978301e-05, "loss": 1.12, "step": 8400 },
    { "epoch": 0.24917917448405252, "grad_norm": 3.5337297916412354, "learning_rate": 3.7552049733153484e-05, "loss": 1.109, "step": 8500 },
    { "epoch": 0.2521106941838649, "grad_norm": 2.3067338466644287, "learning_rate": 3.740543076652396e-05, "loss": 1.1157, "step": 8600 },
    { "epoch": 0.2550422138836773, "grad_norm": 1.8356772661209106, "learning_rate": 3.725881179989444e-05, "loss": 1.1013, "step": 8700 },
    { "epoch": 0.2579737335834897, "grad_norm": 1.1588494777679443, "learning_rate": 3.711219283326491e-05, "loss": 1.0838, "step": 8800 },
    { "epoch": 0.26090525328330205, "grad_norm": 2.9327263832092285, "learning_rate": 3.696557386663539e-05, "loss": 1.0998, "step": 8900 },
    { "epoch": 0.2638367729831144, "grad_norm": 1.1504569053649902, "learning_rate": 3.681895490000586e-05, "loss": 1.1067, "step": 9000 },
    { "epoch": 0.26676829268292684, "grad_norm": 1.546099066734314, "learning_rate": 3.667233593337634e-05, "loss": 1.0985, "step": 9100 },
    { "epoch": 0.2696998123827392, "grad_norm": 2.004364252090454, "learning_rate": 3.652571696674682e-05, "loss": 1.0906, "step": 9200 },
    { "epoch": 0.2726313320825516, "grad_norm": 2.3435940742492676, "learning_rate": 3.63790980001173e-05, "loss": 1.0815, "step": 9300 },
    { "epoch": 0.275562851782364, "grad_norm": 1.5270298719406128, "learning_rate": 3.623247903348778e-05, "loss": 1.08, "step": 9400 },
    { "epoch": 0.2784943714821764, "grad_norm": 4.2500386238098145, "learning_rate": 3.608586006685825e-05, "loss": 1.0857, "step": 9500 },
    { "epoch": 0.28142589118198874, "grad_norm": 1.3212778568267822, "learning_rate": 3.5939241100228726e-05, "loss": 1.0717, "step": 9600 },
    { "epoch": 0.2843574108818011, "grad_norm": 3.6155266761779785, "learning_rate": 3.57926221335992e-05, "loss": 1.0798, "step": 9700 },
    { "epoch": 0.28728893058161353, "grad_norm": 3.934211492538452, "learning_rate": 3.564600316696968e-05, "loss": 1.0777, "step": 9800 },
    { "epoch": 0.2902204502814259, "grad_norm": 1.3743681907653809, "learning_rate": 3.5499384200340155e-05, "loss": 1.0776, "step": 9900 },
    { "epoch": 0.29315196998123827, "grad_norm": 1.2762603759765625, "learning_rate": 3.5352765233710636e-05, "loss": 1.0653, "step": 10000 },
    { "epoch": 0.29608348968105064, "grad_norm": 1.7702666521072388, "learning_rate": 3.520614626708112e-05, "loss": 1.0623, "step": 10100 },
    { "epoch": 0.29901500938086306, "grad_norm": 1.9537125825881958, "learning_rate": 3.505952730045159e-05, "loss": 1.077, "step": 10200 },
    { "epoch": 0.30194652908067543, "grad_norm": 1.6890079975128174, "learning_rate": 3.4912908333822065e-05, "loss": 1.0689, "step": 10300 },
    { "epoch": 0.3048780487804878, "grad_norm": 2.9840946197509766, "learning_rate": 3.476628936719254e-05, "loss": 1.0683, "step": 10400 },
    { "epoch": 0.30780956848030017, "grad_norm": 0.620637834072113, "learning_rate": 3.461967040056302e-05, "loss": 1.0599, "step": 10500 },
    { "epoch": 0.3107410881801126, "grad_norm": 2.518418073654175, "learning_rate": 3.4473051433933494e-05, "loss": 1.0511, "step": 10600 },
    { "epoch": 0.31367260787992496, "grad_norm": 4.154684543609619, "learning_rate": 3.432643246730397e-05, "loss": 1.0639, "step": 10700 },
    { "epoch": 0.3166041275797373, "grad_norm": 2.329303741455078, "learning_rate": 3.417981350067445e-05, "loss": 1.0606, "step": 10800 },
    { "epoch": 0.3195356472795497, "grad_norm": 1.5261120796203613, "learning_rate": 3.4033194534044924e-05, "loss": 1.0651, "step": 10900 },
    { "epoch": 0.3224671669793621, "grad_norm": 1.4638831615447998, "learning_rate": 3.3886575567415405e-05, "loss": 1.0508, "step": 11000 }
  ],
  "logging_steps": 100,
  "max_steps": 34112,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.2062825316352e+16,
  "train_batch_size": 36,
  "trial_name": null,
  "trial_params": null
}