|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 9480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0010548523206751054, "grad_norm": 1.3124364614486694, "learning_rate": 0.00015822784810126583, "loss": 7.5106, "step": 10},
    {"epoch": 0.002109704641350211, "grad_norm": 1.1659833192825317, "learning_rate": 0.00031645569620253165, "loss": 6.8744, "step": 20},
    {"epoch": 0.0031645569620253164, "grad_norm": 0.8274556994438171, "learning_rate": 0.00047468354430379745, "loss": 6.2127, "step": 30},
    {"epoch": 0.004219409282700422, "grad_norm": 2.096945285797119, "learning_rate": 0.0006329113924050633, "loss": 5.7132, "step": 40},
    {"epoch": 0.005274261603375527, "grad_norm": 0.9831845760345459, "learning_rate": 0.0007911392405063291, "loss": 5.2337, "step": 50},
    {"epoch": 0.006329113924050633, "grad_norm": 1.2277730703353882, "learning_rate": 0.0009493670886075949, "loss": 4.7354, "step": 60},
    {"epoch": 0.007383966244725738, "grad_norm": 1.048064947128296, "learning_rate": 0.0011075949367088608, "loss": 4.3617, "step": 70},
    {"epoch": 0.008438818565400843, "grad_norm": 0.9260877370834351, "learning_rate": 0.0012658227848101266, "loss": 4.1292, "step": 80},
    {"epoch": 0.00949367088607595, "grad_norm": 1.345253348350525, "learning_rate": 0.0014240506329113926, "loss": 3.9271, "step": 90},
    {"epoch": 0.010548523206751054, "grad_norm": 1.2292276620864868, "learning_rate": 0.0015, "loss": 3.7717, "step": 100},
    {"epoch": 0.011603375527426161, "grad_norm": 0.6336931586265564, "learning_rate": 0.0015, "loss": 3.616, "step": 110},
    {"epoch": 0.012658227848101266, "grad_norm": 0.738336980342865, "learning_rate": 0.0015, "loss": 3.504, "step": 120},
    {"epoch": 0.013713080168776372, "grad_norm": 0.8369924426078796, "learning_rate": 0.0015, "loss": 3.3989, "step": 130},
    {"epoch": 0.014767932489451477, "grad_norm": 0.646684467792511, "learning_rate": 0.0015, "loss": 3.2992, "step": 140},
    {"epoch": 0.015822784810126583, "grad_norm": 0.9423049688339233, "learning_rate": 0.0015, "loss": 3.2291, "step": 150},
    {"epoch": 0.016877637130801686, "grad_norm": 1.4281316995620728, "learning_rate": 0.0015, "loss": 3.1643, "step": 160},
    {"epoch": 0.017932489451476793, "grad_norm": 0.7731781601905823, "learning_rate": 0.0015, "loss": 3.1024, "step": 170},
    {"epoch": 0.0189873417721519, "grad_norm": 0.860463559627533, "learning_rate": 0.0015, "loss": 3.0604, "step": 180},
    {"epoch": 0.020042194092827006, "grad_norm": 1.0924943685531616, "learning_rate": 0.0015, "loss": 3.0025, "step": 190},
    {"epoch": 0.02109704641350211, "grad_norm": 1.1869707107543945, "learning_rate": 0.0015, "loss": 2.9583, "step": 200},
    {"epoch": 0.022151898734177215, "grad_norm": 0.9566922187805176, "learning_rate": 0.0015, "loss": 2.9292, "step": 210},
    {"epoch": 0.023206751054852322, "grad_norm": 0.8435981869697571, "learning_rate": 0.0015, "loss": 2.8961, "step": 220},
    {"epoch": 0.024261603375527425, "grad_norm": 0.7190911769866943, "learning_rate": 0.0015, "loss": 2.8459, "step": 230},
    {"epoch": 0.02531645569620253, "grad_norm": 0.8628165125846863, "learning_rate": 0.0015, "loss": 2.8162, "step": 240},
    {"epoch": 0.026371308016877638, "grad_norm": 0.9168346524238586, "learning_rate": 0.0015, "loss": 2.7849, "step": 250},
    {"epoch": 0.027426160337552744, "grad_norm": 0.9622735977172852, "learning_rate": 0.0015, "loss": 2.7517, "step": 260},
    {"epoch": 0.028481012658227847, "grad_norm": 1.2251378297805786, "learning_rate": 0.0015, "loss": 2.7257, "step": 270},
    {"epoch": 0.029535864978902954, "grad_norm": 1.2179166078567505, "learning_rate": 0.0015, "loss": 2.6938, "step": 280},
    {"epoch": 0.03059071729957806, "grad_norm": 0.8709646463394165, "learning_rate": 0.0015, "loss": 2.6663, "step": 290},
    {"epoch": 0.03164556962025317, "grad_norm": 0.8570034503936768, "learning_rate": 0.0015, "loss": 2.6402, "step": 300},
    {"epoch": 0.03270042194092827, "grad_norm": 0.9226229786872864, "learning_rate": 0.0015, "loss": 2.6211, "step": 310},
    {"epoch": 0.03375527426160337, "grad_norm": 0.6741663813591003, "learning_rate": 0.0015, "loss": 2.5844, "step": 320},
    {"epoch": 0.03481012658227848, "grad_norm": 1.1187233924865723, "learning_rate": 0.0015, "loss": 2.5831, "step": 330},
    {"epoch": 0.035864978902953586, "grad_norm": 0.7607849836349487, "learning_rate": 0.0015, "loss": 2.5591, "step": 340},
    {"epoch": 0.03691983122362869, "grad_norm": 1.4700108766555786, "learning_rate": 0.0015, "loss": 2.5197, "step": 350},
    {"epoch": 0.0379746835443038, "grad_norm": 0.8260409832000732, "learning_rate": 0.0015, "loss": 2.4953, "step": 360},
    {"epoch": 0.039029535864978905, "grad_norm": 0.9084756374359131, "learning_rate": 0.0015, "loss": 2.4791, "step": 370},
    {"epoch": 0.04008438818565401, "grad_norm": 0.797562301158905, "learning_rate": 0.0015, "loss": 2.4715, "step": 380},
    {"epoch": 0.04113924050632911, "grad_norm": 1.210136890411377, "learning_rate": 0.0015, "loss": 2.448, "step": 390},
    {"epoch": 0.04219409282700422, "grad_norm": 0.7420892715454102, "learning_rate": 0.0015, "loss": 2.4179, "step": 400},
    {"epoch": 0.043248945147679324, "grad_norm": 0.9167479872703552, "learning_rate": 0.0015, "loss": 2.4091, "step": 410},
    {"epoch": 0.04430379746835443, "grad_norm": 0.8478865623474121, "learning_rate": 0.0015, "loss": 2.3941, "step": 420},
    {"epoch": 0.04535864978902954, "grad_norm": 0.9713917374610901, "learning_rate": 0.0015, "loss": 2.3615, "step": 430},
    {"epoch": 0.046413502109704644, "grad_norm": 0.8634635806083679, "learning_rate": 0.0015, "loss": 2.3476, "step": 440},
    {"epoch": 0.04746835443037975, "grad_norm": 0.828904390335083, "learning_rate": 0.0015, "loss": 2.3529, "step": 450},
    {"epoch": 0.04852320675105485, "grad_norm": 0.777154803276062, "learning_rate": 0.0015, "loss": 2.3204, "step": 460},
    {"epoch": 0.049578059071729956, "grad_norm": 0.7510536909103394, "learning_rate": 0.0015, "loss": 2.3013, "step": 470},
    {"epoch": 0.05063291139240506, "grad_norm": 1.222251534461975, "learning_rate": 0.0015, "loss": 2.2974, "step": 480},
    {"epoch": 0.05168776371308017, "grad_norm": 0.7576515078544617, "learning_rate": 0.0015, "loss": 2.2676, "step": 490},
    {"epoch": 0.052742616033755275, "grad_norm": 0.8541025519371033, "learning_rate": 0.0015, "loss": 2.2628, "step": 500},
    {"epoch": 0.05379746835443038, "grad_norm": 0.9027695655822754, "learning_rate": 0.0015, "loss": 2.2526, "step": 510},
    {"epoch": 0.05485232067510549, "grad_norm": 0.7879486680030823, "learning_rate": 0.0015, "loss": 2.2404, "step": 520},
    {"epoch": 0.05590717299578059, "grad_norm": 0.7479010224342346, "learning_rate": 0.0015, "loss": 2.2215, "step": 530},
    {"epoch": 0.056962025316455694, "grad_norm": 0.9469801783561707, "learning_rate": 0.0015, "loss": 2.2036, "step": 540},
    {"epoch": 0.0580168776371308, "grad_norm": 0.7802208662033081, "learning_rate": 0.0015, "loss": 2.1999, "step": 550},
    {"epoch": 0.05907172995780591, "grad_norm": 0.8081716299057007, "learning_rate": 0.0015, "loss": 2.1678, "step": 560},
    {"epoch": 0.060126582278481014, "grad_norm": 0.7798997759819031, "learning_rate": 0.0015, "loss": 2.1736, "step": 570},
    {"epoch": 0.06118143459915612, "grad_norm": 0.8570801019668579, "learning_rate": 0.0015, "loss": 2.1711, "step": 580},
    {"epoch": 0.06223628691983123, "grad_norm": 0.9379898905754089, "learning_rate": 0.0015, "loss": 2.1417, "step": 590},
    {"epoch": 0.06329113924050633, "grad_norm": 1.5215798616409302, "learning_rate": 0.0015, "loss": 2.1266, "step": 600},
    {"epoch": 0.06434599156118144, "grad_norm": 0.9008197784423828, "learning_rate": 0.0015, "loss": 2.1324, "step": 610},
    {"epoch": 0.06540084388185655, "grad_norm": 0.6915869116783142, "learning_rate": 0.0015, "loss": 2.1085, "step": 620},
    {"epoch": 0.06645569620253164, "grad_norm": 0.8122121095657349, "learning_rate": 0.0015, "loss": 2.0962, "step": 630},
    {"epoch": 0.06751054852320675, "grad_norm": 0.7248497605323792, "learning_rate": 0.0015, "loss": 2.1083, "step": 640},
    {"epoch": 0.06856540084388185, "grad_norm": 0.7885815501213074, "learning_rate": 0.0015, "loss": 2.096, "step": 650},
    {"epoch": 0.06962025316455696, "grad_norm": 1.0940124988555908, "learning_rate": 0.0015, "loss": 2.0864, "step": 660},
    {"epoch": 0.07067510548523206, "grad_norm": 0.9496759176254272, "learning_rate": 0.0015, "loss": 2.0683, "step": 670},
    {"epoch": 0.07172995780590717, "grad_norm": 0.7619010806083679, "learning_rate": 0.0015, "loss": 2.0573, "step": 680},
    {"epoch": 0.07278481012658228, "grad_norm": 0.6933894753456116, "learning_rate": 0.0015, "loss": 2.0698, "step": 690},
    {"epoch": 0.07383966244725738, "grad_norm": 0.7526223659515381, "learning_rate": 0.0015, "loss": 2.0488, "step": 700},
    {"epoch": 0.07489451476793249, "grad_norm": 0.6634929180145264, "learning_rate": 0.0015, "loss": 2.0402, "step": 710},
    {"epoch": 0.0759493670886076, "grad_norm": 1.2680834531784058, "learning_rate": 0.0015, "loss": 2.0199, "step": 720},
    {"epoch": 0.0770042194092827, "grad_norm": 0.9700834155082703, "learning_rate": 0.0015, "loss": 2.0161, "step": 730},
    {"epoch": 0.07805907172995781, "grad_norm": 0.8383921384811401, "learning_rate": 0.0015, "loss": 2.0247, "step": 740},
    {"epoch": 0.07911392405063292, "grad_norm": 0.845635175704956, "learning_rate": 0.0015, "loss": 2.0064, "step": 750},
    {"epoch": 0.08016877637130802, "grad_norm": 1.2328712940216064, "learning_rate": 0.0015, "loss": 2.0011, "step": 760},
    {"epoch": 0.08122362869198312, "grad_norm": 1.0234324932098389, "learning_rate": 0.0015, "loss": 1.9948, "step": 770},
    {"epoch": 0.08227848101265822, "grad_norm": 0.9338547587394714, "learning_rate": 0.0015, "loss": 1.9883, "step": 780},
    {"epoch": 0.08333333333333333, "grad_norm": 0.7371922135353088, "learning_rate": 0.0015, "loss": 1.9939, "step": 790},
    {"epoch": 0.08438818565400844, "grad_norm": 0.7209744453430176, "learning_rate": 0.0015, "loss": 1.9682, "step": 800},
    {"epoch": 0.08544303797468354, "grad_norm": 0.7245383262634277, "learning_rate": 0.0015, "loss": 1.9563, "step": 810},
    {"epoch": 0.08649789029535865, "grad_norm": 1.1245934963226318, "learning_rate": 0.0015, "loss": 1.9653, "step": 820},
    {"epoch": 0.08755274261603375, "grad_norm": 0.7008262872695923, "learning_rate": 0.0015, "loss": 1.9652, "step": 830},
    {"epoch": 0.08860759493670886, "grad_norm": 0.9483989477157593, "learning_rate": 0.0015, "loss": 1.9461, "step": 840},
    {"epoch": 0.08966244725738397, "grad_norm": 0.717894971370697, "learning_rate": 0.0015, "loss": 1.9419, "step": 850},
    {"epoch": 0.09071729957805907, "grad_norm": 0.7447081208229065, "learning_rate": 0.0015, "loss": 1.9442, "step": 860},
    {"epoch": 0.09177215189873418, "grad_norm": 0.8731544613838196, "learning_rate": 0.0015, "loss": 1.9419, "step": 870},
    {"epoch": 0.09282700421940929, "grad_norm": 0.9204733371734619, "learning_rate": 0.0015, "loss": 1.9304, "step": 880},
    {"epoch": 0.0938818565400844, "grad_norm": 1.242817997932434, "learning_rate": 0.0015, "loss": 1.9259, "step": 890},
    {"epoch": 0.0949367088607595, "grad_norm": 0.9278952479362488, "learning_rate": 0.0015, "loss": 1.9245, "step": 900},
    {"epoch": 0.09599156118143459, "grad_norm": 0.7062042951583862, "learning_rate": 0.0015, "loss": 1.926, "step": 910},
    {"epoch": 0.0970464135021097, "grad_norm": 0.786612868309021, "learning_rate": 0.0015, "loss": 1.9149, "step": 920},
    {"epoch": 0.0981012658227848, "grad_norm": 1.001168131828308, "learning_rate": 0.0015, "loss": 1.8969, "step": 930},
    {"epoch": 0.09915611814345991, "grad_norm": 0.9273200035095215, "learning_rate": 0.0015, "loss": 1.908, "step": 940},
    {"epoch": 0.10021097046413502, "grad_norm": 0.8182703852653503, "learning_rate": 0.0015, "loss": 1.8871, "step": 950},
    {"epoch": 0.10126582278481013, "grad_norm": 0.8490096926689148, "learning_rate": 0.0015, "loss": 1.8907, "step": 960},
    {"epoch": 0.10232067510548523, "grad_norm": 1.2461068630218506, "learning_rate": 0.0015, "loss": 1.8969, "step": 970},
    {"epoch": 0.10337552742616034, "grad_norm": 0.9289028644561768, "learning_rate": 0.0015, "loss": 1.8805, "step": 980},
    {"epoch": 0.10443037974683544, "grad_norm": 1.049816608428955, "learning_rate": 0.0015, "loss": 1.8751, "step": 990},
    {"epoch": 0.10548523206751055, "grad_norm": 0.8595777750015259, "learning_rate": 0.0015, "loss": 1.8825, "step": 1000},
    {"epoch": 0.10654008438818566, "grad_norm": 0.7875465750694275, "learning_rate": 0.0015, "loss": 1.8747, "step": 1010},
    {"epoch": 0.10759493670886076, "grad_norm": 0.9580855965614319, "learning_rate": 0.0015, "loss": 1.8681, "step": 1020},
    {"epoch": 0.10864978902953587, "grad_norm": 0.7750164866447449, "learning_rate": 0.0015, "loss": 1.8684, "step": 1030},
    {"epoch": 0.10970464135021098, "grad_norm": 0.7347003817558289, "learning_rate": 0.0015, "loss": 1.8626, "step": 1040},
    {"epoch": 0.11075949367088607, "grad_norm": 0.9035710096359253, "learning_rate": 0.0015, "loss": 1.8654, "step": 1050},
    {"epoch": 0.11181434599156118, "grad_norm": 0.6644186973571777, "learning_rate": 0.0015, "loss": 1.8485, "step": 1060},
    {"epoch": 0.11286919831223628, "grad_norm": 0.6655753254890442, "learning_rate": 0.0015, "loss": 1.8376, "step": 1070},
    {"epoch": 0.11392405063291139, "grad_norm": 0.9941337704658508, "learning_rate": 0.0015, "loss": 1.8394, "step": 1080},
    {"epoch": 0.1149789029535865, "grad_norm": 0.935934841632843, "learning_rate": 0.0015, "loss": 1.8478, "step": 1090},
    {"epoch": 0.1160337552742616, "grad_norm": 0.7573521137237549, "learning_rate": 0.0015, "loss": 1.8373, "step": 1100},
    {"epoch": 0.11708860759493671, "grad_norm": 0.7465738654136658, "learning_rate": 0.0015, "loss": 1.839, "step": 1110},
    {"epoch": 0.11814345991561181, "grad_norm": 0.8040273189544678, "learning_rate": 0.0015, "loss": 1.8344, "step": 1120},
    {"epoch": 0.11919831223628692, "grad_norm": 1.0702176094055176, "learning_rate": 0.0015, "loss": 1.8198, "step": 1130},
    {"epoch": 0.12025316455696203, "grad_norm": 1.0176023244857788, "learning_rate": 0.0015, "loss": 1.8329, "step": 1140},
    {"epoch": 0.12130801687763713, "grad_norm": 1.320648431777954, "learning_rate": 0.0015, "loss": 1.8165, "step": 1150},
    {"epoch": 0.12236286919831224, "grad_norm": 0.8231999278068542, "learning_rate": 0.0015, "loss": 1.8279, "step": 1160},
    {"epoch": 0.12341772151898735, "grad_norm": 0.6487593054771423, "learning_rate": 0.0015, "loss": 1.8084, "step": 1170},
    {"epoch": 0.12447257383966245, "grad_norm": 0.8109908699989319, "learning_rate": 0.0015, "loss": 1.7991, "step": 1180},
    {"epoch": 0.12552742616033755, "grad_norm": 0.6869083046913147, "learning_rate": 0.0015, "loss": 1.8032, "step": 1190},
    {"epoch": 0.12658227848101267, "grad_norm": 1.4142192602157593, "learning_rate": 0.0015, "loss": 1.8208, "step": 1200},
    {"epoch": 0.12763713080168776, "grad_norm": 1.269906759262085, "learning_rate": 0.0015, "loss": 1.8178, "step": 1210},
    {"epoch": 0.12869198312236288, "grad_norm": 0.9178128838539124, "learning_rate": 0.0015, "loss": 1.7924, "step": 1220},
    {"epoch": 0.12974683544303797, "grad_norm": 0.8090775012969971, "learning_rate": 0.0015, "loss": 1.7765, "step": 1230},
    {"epoch": 0.1308016877637131, "grad_norm": 0.6909911036491394, "learning_rate": 0.0015, "loss": 1.7947, "step": 1240},
    {"epoch": 0.13185654008438819, "grad_norm": 0.7301704287528992, "learning_rate": 0.0015, "loss": 1.7976, "step": 1250},
    {"epoch": 0.13291139240506328, "grad_norm": 0.7470296621322632, "learning_rate": 0.0015, "loss": 1.7845, "step": 1260},
    {"epoch": 0.1339662447257384, "grad_norm": 0.6710943579673767, "learning_rate": 0.0015, "loss": 1.7799, "step": 1270},
    {"epoch": 0.1350210970464135, "grad_norm": 0.652271568775177, "learning_rate": 0.0015, "loss": 1.7962, "step": 1280},
    {"epoch": 0.1360759493670886, "grad_norm": 0.7345850467681885, "learning_rate": 0.0015, "loss": 1.7836, "step": 1290},
    {"epoch": 0.1371308016877637, "grad_norm": 0.7135871052742004, "learning_rate": 0.0015, "loss": 1.7707, "step": 1300},
    {"epoch": 0.13818565400843882, "grad_norm": 0.643592357635498, "learning_rate": 0.0015, "loss": 1.772, "step": 1310},
    {"epoch": 0.13924050632911392, "grad_norm": 0.7280349731445312, "learning_rate": 0.0015, "loss": 1.7779, "step": 1320},
    {"epoch": 0.14029535864978904, "grad_norm": 1.4107588529586792, "learning_rate": 0.0015, "loss": 1.7671, "step": 1330},
    {"epoch": 0.14135021097046413, "grad_norm": 0.709518551826477, "learning_rate": 0.0015, "loss": 1.7736, "step": 1340},
    {"epoch": 0.14240506329113925, "grad_norm": 0.706581711769104, "learning_rate": 0.0015, "loss": 1.7738, "step": 1350},
    {"epoch": 0.14345991561181434, "grad_norm": 0.6494163870811462, "learning_rate": 0.0015, "loss": 1.7618, "step": 1360},
    {"epoch": 0.14451476793248946, "grad_norm": 0.8145359754562378, "learning_rate": 0.0015, "loss": 1.7601, "step": 1370},
    {"epoch": 0.14556962025316456, "grad_norm": 0.7344682216644287, "learning_rate": 0.0015, "loss": 1.7516, "step": 1380},
    {"epoch": 0.14662447257383968, "grad_norm": 1.0990504026412964, "learning_rate": 0.0015, "loss": 1.7637, "step": 1390},
    {"epoch": 0.14767932489451477, "grad_norm": 0.7976487278938293, "learning_rate": 0.0015, "loss": 1.7469, "step": 1400},
    {"epoch": 0.14873417721518986, "grad_norm": 0.7702522277832031, "learning_rate": 0.0015, "loss": 1.749, "step": 1410},
    {"epoch": 0.14978902953586498, "grad_norm": 0.6728357672691345, "learning_rate": 0.0015, "loss": 1.7529, "step": 1420},
    {"epoch": 0.15084388185654007, "grad_norm": 0.924201250076294, "learning_rate": 0.0015, "loss": 1.7487, "step": 1430},
    {"epoch": 0.1518987341772152, "grad_norm": 0.7881402373313904, "learning_rate": 0.0015, "loss": 1.7482, "step": 1440},
    {"epoch": 0.1529535864978903, "grad_norm": 0.7286288142204285, "learning_rate": 0.0015, "loss": 1.7428, "step": 1450},
    {"epoch": 0.1540084388185654, "grad_norm": 0.6509220004081726, "learning_rate": 0.0015, "loss": 1.7408, "step": 1460},
    {"epoch": 0.1550632911392405, "grad_norm": 1.1083709001541138, "learning_rate": 0.0015, "loss": 1.7375, "step": 1470},
    {"epoch": 0.15611814345991562, "grad_norm": 0.6865395307540894, "learning_rate": 0.0015, "loss": 1.7398, "step": 1480},
    {"epoch": 0.1571729957805907, "grad_norm": 0.7428089380264282, "learning_rate": 0.0015, "loss": 1.7375, "step": 1490},
    {"epoch": 0.15822784810126583, "grad_norm": 0.745134174823761, "learning_rate": 0.0015, "loss": 1.7374, "step": 1500},
    {"epoch": 0.15928270042194093, "grad_norm": 0.8267588019371033, "learning_rate": 0.0015, "loss": 1.7343, "step": 1510},
    {"epoch": 0.16033755274261605, "grad_norm": 0.7420427203178406, "learning_rate": 0.0015, "loss": 1.735, "step": 1520},
    {"epoch": 0.16139240506329114, "grad_norm": 0.643179178237915, "learning_rate": 0.0015, "loss": 1.7154, "step": 1530},
    {"epoch": 0.16244725738396623, "grad_norm": 0.6770628690719604, "learning_rate": 0.0015, "loss": 1.7102, "step": 1540},
    {"epoch": 0.16350210970464135, "grad_norm": 1.2731250524520874, "learning_rate": 0.0015, "loss": 1.7121, "step": 1550},
    {"epoch": 0.16455696202531644, "grad_norm": 1.104306697845459, "learning_rate": 0.0015, "loss": 1.7241, "step": 1560},
    {"epoch": 0.16561181434599156, "grad_norm": 0.757106602191925, "learning_rate": 0.0015, "loss": 1.7131, "step": 1570},
    {"epoch": 0.16666666666666666, "grad_norm": 0.8353426456451416, "learning_rate": 0.0015, "loss": 1.7125, "step": 1580},
    {"epoch": 0.16772151898734178, "grad_norm": 1.0011383295059204, "learning_rate": 0.0015, "loss": 1.7099, "step": 1590},
    {"epoch": 0.16877637130801687, "grad_norm": 0.7113327980041504, "learning_rate": 0.0015, "loss": 1.7154, "step": 1600},
    {"epoch": 0.169831223628692, "grad_norm": 0.723044753074646, "learning_rate": 0.0015, "loss": 1.7163, "step": 1610},
    {"epoch": 0.17088607594936708, "grad_norm": 0.6448427438735962, "learning_rate": 0.0015, "loss": 1.6848, "step": 1620},
    {"epoch": 0.1719409282700422, "grad_norm": 0.7718586325645447, "learning_rate": 0.0015, "loss": 1.6985, "step": 1630},
    {"epoch": 0.1729957805907173, "grad_norm": 0.7018836140632629, "learning_rate": 0.0015, "loss": 1.7001, "step": 1640},
    {"epoch": 0.17405063291139242, "grad_norm": 0.8802307844161987, "learning_rate": 0.0015, "loss": 1.6923, "step": 1650},
    {"epoch": 0.1751054852320675, "grad_norm": 0.782353937625885, "learning_rate": 0.0015, "loss": 1.691, "step": 1660},
    {"epoch": 0.17616033755274263, "grad_norm": 0.732453465461731, "learning_rate": 0.0015, "loss": 1.7032, "step": 1670},
    {"epoch": 0.17721518987341772, "grad_norm": 0.8128688335418701, "learning_rate": 0.0015, "loss": 1.689, "step": 1680},
    {"epoch": 0.17827004219409281, "grad_norm": 0.7799118757247925, "learning_rate": 0.0015, "loss": 1.6798, "step": 1690},
    {"epoch": 0.17932489451476794, "grad_norm": 0.6542706489562988, "learning_rate": 0.0015, "loss": 1.6939, "step": 1700},
    {"epoch": 0.18037974683544303, "grad_norm": 0.6584886908531189, "learning_rate": 0.0015, "loss": 1.6763, "step": 1710},
    {"epoch": 0.18143459915611815, "grad_norm": 0.7360519766807556, "learning_rate": 0.0015, "loss": 1.6777, "step": 1720},
    {"epoch": 0.18248945147679324, "grad_norm": 0.868495523929596, "learning_rate": 0.0015, "loss": 1.6815, "step": 1730},
    {"epoch": 0.18354430379746836, "grad_norm": 0.8044885993003845, "learning_rate": 0.0015, "loss": 1.684, "step": 1740},
    {"epoch": 0.18459915611814345, "grad_norm": 0.7511466145515442, "learning_rate": 0.0015, "loss": 1.7041, "step": 1750},
    {"epoch": 0.18565400843881857, "grad_norm": 0.6682915091514587, "learning_rate": 0.0015, "loss": 1.6836, "step": 1760},
    {"epoch": 0.18670886075949367, "grad_norm": 0.6851530075073242, "learning_rate": 0.0015, "loss": 1.6811, "step": 1770},
    {"epoch": 0.1877637130801688, "grad_norm": 0.8657528758049011, "learning_rate": 0.0015, "loss": 1.6873, "step": 1780},
    {"epoch": 0.18881856540084388, "grad_norm": 1.4020063877105713, "learning_rate": 0.0015, "loss": 1.6893, "step": 1790},
    {"epoch": 0.189873417721519, "grad_norm": 0.8835797905921936, "learning_rate": 0.0015, "loss": 1.6793, "step": 1800},
    {"epoch": 0.1909282700421941, "grad_norm": 0.629051685333252, "learning_rate": 0.0015, "loss": 1.6651, "step": 1810},
    {"epoch": 0.19198312236286919, "grad_norm": 0.7854632139205933, "learning_rate": 0.0015, "loss": 1.6655, "step": 1820},
    {"epoch": 0.1930379746835443, "grad_norm": 0.6483657360076904, "learning_rate": 0.0015, "loss": 1.6634, "step": 1830},
    {"epoch": 0.1940928270042194, "grad_norm": 0.7135815620422363, "learning_rate": 0.0015, "loss": 1.6644, "step": 1840},
    {"epoch": 0.19514767932489452, "grad_norm": 0.7771666049957275, "learning_rate": 0.0015, "loss": 1.6689, "step": 1850},
    {"epoch": 0.1962025316455696, "grad_norm": 0.8964939713478088, "learning_rate": 0.0015, "loss": 1.6729, "step": 1860},
    {"epoch": 0.19725738396624473, "grad_norm": 0.6993046998977661, "learning_rate": 0.0015, "loss": 1.6697, "step": 1870},
    {"epoch": 0.19831223628691982, "grad_norm": 0.6576790809631348, "learning_rate": 0.0015, "loss": 1.6567, "step": 1880},
    {"epoch": 0.19936708860759494, "grad_norm": 0.7285045981407166, "learning_rate": 0.0015, "loss": 1.6631, "step": 1890},
    {"epoch": 0.20042194092827004, "grad_norm": 0.9496256113052368, "learning_rate": 0.0015, "loss": 1.6597, "step": 1900},
    {"epoch": 0.20147679324894516, "grad_norm": 0.7459031939506531, "learning_rate": 0.0015, "loss": 1.6583, "step": 1910},
    {"epoch": 0.20253164556962025, "grad_norm": 0.8031527996063232, "learning_rate": 0.0015, "loss": 1.6603, "step": 1920},
    {"epoch": 0.20358649789029537, "grad_norm": 0.801005482673645, "learning_rate": 0.0015, "loss": 1.6596, "step": 1930},
    {"epoch": 0.20464135021097046, "grad_norm": 0.707309901714325, "learning_rate": 0.0015, "loss": 1.6475, "step": 1940},
    {"epoch": 0.20569620253164558, "grad_norm": 0.6514493823051453, "learning_rate": 0.0015, "loss": 1.6458, "step": 1950},
    {"epoch": 0.20675105485232068, "grad_norm": 0.7231051921844482, "learning_rate": 0.0015, "loss": 1.6509, "step": 1960},
    {"epoch": 0.20780590717299577, "grad_norm": 0.6800807118415833, "learning_rate": 0.0015, "loss": 1.6629, "step": 1970},
    {"epoch": 0.2088607594936709, "grad_norm": 0.6729750037193298, "learning_rate": 0.0015, "loss": 1.6425, "step": 1980},
    {"epoch": 0.20991561181434598, "grad_norm": 0.654437243938446, "learning_rate": 0.0015, "loss": 1.6403, "step": 1990},
    {"epoch": 0.2109704641350211, "grad_norm": 0.8978038430213928, "learning_rate": 0.0015, "loss": 1.6408, "step": 2000},
    {"epoch": 0.2120253164556962, "grad_norm": 0.6976383924484253, "learning_rate": 0.0015, "loss": 1.6503, "step": 2010},
    {"epoch": 0.21308016877637131, "grad_norm": 0.8478453755378723, "learning_rate": 0.0015, "loss": 1.6584, "step": 2020},
    {"epoch": 0.2141350210970464, "grad_norm": 0.719779372215271, "learning_rate": 0.0015, "loss": 1.6435, "step": 2030},
    {"epoch": 0.21518987341772153, "grad_norm": 0.6123868227005005, "learning_rate": 0.0015, "loss": 1.6438, "step": 2040},
    {"epoch": 0.21624472573839662, "grad_norm": 0.7718461751937866, "learning_rate": 0.0015, "loss": 1.6403, "step": 2050},
    {"epoch": 0.21729957805907174, "grad_norm": 0.7014973759651184, "learning_rate": 0.0015, "loss": 1.6331, "step": 2060},
    {"epoch": 0.21835443037974683, "grad_norm": 1.0192010402679443, "learning_rate": 0.0015, "loss": 1.6352, "step": 2070},
    {"epoch": 0.21940928270042195, "grad_norm": 0.7457175254821777, "learning_rate": 0.0015, "loss": 1.6402, "step": 2080},
    {"epoch": 0.22046413502109705, "grad_norm": 1.0028305053710938, "learning_rate": 0.0015, "loss": 1.6344, "step": 2090},
    {"epoch": 0.22151898734177214, "grad_norm": 1.0459880828857422, "learning_rate": 0.0015, "loss": 1.6409, "step": 2100},
    {"epoch": 0.22257383966244726, "grad_norm": 0.7769726514816284, "learning_rate": 0.0015, "loss": 1.6245, "step": 2110},
    {"epoch": 0.22362869198312235, "grad_norm": 0.6035235524177551, "learning_rate": 0.0015, "loss": 1.6222, "step": 2120},
    {"epoch": 0.22468354430379747, "grad_norm": 0.7195689082145691, "learning_rate": 0.0015, "loss": 1.6295, "step": 2130},
    {"epoch": 0.22573839662447256, "grad_norm": 0.6837499141693115, "learning_rate": 0.0015, "loss": 1.6224, "step": 2140},
    {"epoch": 0.22679324894514769, "grad_norm": 0.9146136045455933, "learning_rate": 0.0015, "loss": 1.63, "step": 2150},
    {"epoch": 0.22784810126582278, "grad_norm": 0.9756319522857666, "learning_rate": 0.0015, "loss": 1.6454, "step": 2160},
    {"epoch": 0.2289029535864979, "grad_norm": 0.8231519460678101, "learning_rate": 0.0015, "loss": 1.6263, "step": 2170},
    {"epoch": 0.229957805907173, "grad_norm": 0.8124420046806335, "learning_rate": 0.0015, "loss": 1.6107, "step": 2180},
    {"epoch": 0.2310126582278481, "grad_norm": 0.8022341132164001, "learning_rate": 0.0015, "loss": 1.6293, "step": 2190},
    {"epoch": 0.2320675105485232, "grad_norm": 0.6795050501823425, "learning_rate": 0.0015, "loss": 1.63, "step": 2200},
    {"epoch": 0.23312236286919832, "grad_norm": 0.783323347568512, "learning_rate": 0.0015, "loss": 1.6145, "step": 2210},
    {"epoch": 0.23417721518987342, "grad_norm": 0.6651756763458252, "learning_rate": 0.0015, "loss": 1.6263, "step": 2220},
    {"epoch": 0.23523206751054854, "grad_norm": 1.179243803024292, "learning_rate": 0.0015, "loss": 1.6212, "step": 2230},
    {"epoch": 0.23628691983122363, "grad_norm": 0.8220842480659485, "learning_rate": 0.0015, "loss": 1.6209, "step": 2240},
    {"epoch": 0.23734177215189872, "grad_norm": 1.0973544120788574, "learning_rate": 0.0015, "loss": 1.6117, "step": 2250},
    {"epoch": 0.23839662447257384, "grad_norm": 1.2406624555587769, "learning_rate": 0.0015, "loss": 1.6039, "step": 2260},
    {"epoch": 0.23945147679324894, "grad_norm": 0.7092892527580261, "learning_rate": 0.0015, "loss": 1.6396, "step": 2270},
    {"epoch": 0.24050632911392406, "grad_norm": 0.7969619631767273, "learning_rate": 0.0015, "loss": 1.6216, "step": 2280},
    {"epoch": 0.24156118143459915, "grad_norm": 0.8276784420013428, "learning_rate": 0.0015, "loss": 1.608, "step": 2290},
    {"epoch": 0.24261603375527427, "grad_norm": 0.9074499607086182, "learning_rate": 0.0015, "loss": 1.606, "step": 2300},
    {"epoch": 0.24367088607594936, "grad_norm": 0.6614257097244263, "learning_rate": 0.0015, "loss": 1.6058, "step": 2310},
    {"epoch": 0.24472573839662448, "grad_norm": 0.6448475122451782, "learning_rate": 0.0015, "loss": 1.6141, "step": 2320},
    {"epoch": 0.24578059071729957, "grad_norm": 0.7352731227874756, "learning_rate": 0.0015, "loss": 1.5993, "step": 2330},
    {"epoch": 0.2468354430379747, "grad_norm": 0.8879994750022888, "learning_rate": 0.0015, "loss": 1.5969, "step": 2340},
    {"epoch": 0.2478902953586498, "grad_norm": 0.7568689584732056, "learning_rate": 0.0015, "loss": 1.6009, "step": 2350},
    {"epoch": 0.2489451476793249, "grad_norm": 0.7219222187995911, "learning_rate": 0.0015, "loss": 1.6043, "step": 2360},
    {"epoch": 0.25, "grad_norm": 0.708109438419342, "learning_rate": 0.0015, "loss": 1.6037, "step": 2370},
    {"epoch": 0.2510548523206751, "grad_norm": 0.6003740429878235, "learning_rate": 0.0015, "loss": 1.6037, "step": 2380},
    {"epoch": 0.2521097046413502, "grad_norm": 0.6674261093139648, "learning_rate": 0.0015, "loss": 1.6059, "step": 2390},
    {"epoch": 0.25316455696202533, "grad_norm": 0.6381140947341919, "learning_rate": 0.0015, "loss": 1.6029, "step": 2400},
    {"epoch": 0.2542194092827004, "grad_norm": 0.6798749566078186, "learning_rate": 0.0015, "loss": 1.6053, "step": 2410},
    {"epoch": 0.2552742616033755, "grad_norm": 0.8452035188674927, "learning_rate": 0.0015, "loss": 1.5994, "step": 2420},
    {"epoch": 0.2563291139240506, "grad_norm": 0.6483489274978638, "learning_rate": 0.0015, "loss": 1.597, "step": 2430},
    {"epoch": 0.25738396624472576, "grad_norm": 0.7470886707305908, "learning_rate": 0.0015, "loss": 1.6056, "step": 2440},
    {"epoch": 0.25843881856540085, "grad_norm": 0.6924394369125366, "learning_rate": 0.0015, "loss": 1.5921, "step": 2450},
    {"epoch": 0.25949367088607594, "grad_norm": 0.6187343597412109, "learning_rate": 0.0015, "loss": 1.6053, "step": 2460},
    {"epoch": 0.26054852320675104, "grad_norm": 0.944770097732544, "learning_rate": 0.0015, "loss": 1.5954, "step": 2470},
    {"epoch": 0.2616033755274262, "grad_norm": 0.760418176651001, "learning_rate": 0.0015, "loss": 1.6016, "step": 2480},
    {"epoch": 0.2626582278481013, "grad_norm": 0.767612874507904, "learning_rate": 0.0015, "loss": 1.5883, "step": 2490},
    {"epoch": 0.26371308016877637, "grad_norm": 0.6893068552017212, "learning_rate": 0.0015, "loss": 1.5943, "step": 2500},
    {"epoch": 0.26476793248945146, "grad_norm": 0.6650967001914978, "learning_rate": 0.0015, "loss": 1.5914, "step": 2510},
    {"epoch": 0.26582278481012656, "grad_norm": 0.745049774646759, "learning_rate": 0.0015, "loss": 1.5954, "step": 2520},
    {"epoch": 0.2668776371308017, "grad_norm": 0.6933596134185791, "learning_rate": 0.0015, "loss": 1.5913, "step": 2530},
    {"epoch": 0.2679324894514768, "grad_norm": 0.6802793741226196, "learning_rate": 0.0015, "loss": 1.5928, "step": 2540},
    {"epoch": 0.2689873417721519, "grad_norm": 0.6025201082229614, "learning_rate": 0.0015, "loss": 1.5859, "step": 2550},
    {"epoch": 0.270042194092827, "grad_norm": 0.680928647518158, "learning_rate": 0.0015, "loss": 1.5962, "step": 2560},
    {"epoch": 0.27109704641350213, "grad_norm": 0.71792072057724, "learning_rate": 0.0015, "loss": 1.5904, "step": 2570},
    {"epoch": 0.2721518987341772, "grad_norm": 0.6014297008514404, "learning_rate": 0.0015, "loss": 1.5882, "step": 2580},
    {"epoch": 0.2732067510548523, "grad_norm": 0.6050705909729004, "learning_rate": 0.0015, "loss": 1.5924, "step": 2590},
    {"epoch": 0.2742616033755274, "grad_norm": 0.688946545124054, "learning_rate": 0.0015, "loss": 1.5909, "step": 2600},
    {"epoch": 0.27531645569620256, "grad_norm": 0.6885754466056824, "learning_rate": 0.0015, "loss": 1.5913, "step": 2610},
    {"epoch": 0.27637130801687765, "grad_norm": 0.6848039031028748, "learning_rate": 0.0015, "loss": 1.5892, "step": 2620},
    {"epoch": 0.27742616033755274, "grad_norm": 0.7295860052108765, "learning_rate": 0.0015, "loss": 1.5763, "step": 2630},
    {"epoch": 0.27848101265822783, "grad_norm": 0.7082112431526184, "learning_rate": 0.0015, "loss": 1.5896, "step": 2640},
    {"epoch": 0.2795358649789029, "grad_norm": 0.820855975151062, "learning_rate": 0.0015, "loss": 1.5832, "step": 2650},
    {"epoch": 0.2805907172995781, "grad_norm": 0.653474748134613, "learning_rate": 0.0015, "loss": 1.5757, "step": 2660},
    {"epoch": 0.28164556962025317, "grad_norm": 0.6177871227264404, "learning_rate": 0.0015, "loss": 1.574, "step": 2670},
    {"epoch": 0.28270042194092826, "grad_norm": 0.7549741268157959, "learning_rate": 0.0015, "loss": 1.5763, "step": 2680},
    {"epoch": 0.28375527426160335, "grad_norm": 0.6169193983078003, "learning_rate": 0.0015, "loss": 1.5787, "step": 2690},
    {"epoch": 0.2848101265822785, "grad_norm": 0.7113834023475647, "learning_rate": 0.0015, "loss": 1.5706, "step": 2700},
    {"epoch": 0.2858649789029536, "grad_norm": 0.7188537120819092, "learning_rate": 0.0015, "loss": 1.5774, "step": 2710},
    {"epoch": 0.2869198312236287, "grad_norm": 0.6048180460929871, "learning_rate": 0.0015, "loss": 1.5795, "step": 2720},
    {"epoch": 0.2879746835443038, "grad_norm": 0.7804025411605835, "learning_rate": 0.0015, "loss": 1.5591, "step": 2730},
    {"epoch": 0.2890295358649789, "grad_norm": 1.2684450149536133, "learning_rate": 0.0015, "loss": 1.5732, "step": 2740},
    {"epoch": 0.290084388185654, "grad_norm": 0.6620485782623291, "learning_rate": 0.0015, "loss": 1.5801, "step": 2750},
    {"epoch": 0.2911392405063291, "grad_norm": 0.6825975775718689, "learning_rate": 0.0015, "loss": 1.588, "step": 2760},
    {"epoch": 0.2921940928270042, "grad_norm": 0.6704723238945007, "learning_rate": 0.0015, "loss": 1.5789, "step": 2770},
    {"epoch": 0.29324894514767935, "grad_norm": 0.7003452777862549, "learning_rate": 0.0015, "loss": 1.5759, "step": 2780},
    {"epoch": 0.29430379746835444, "grad_norm": 0.5659245252609253, "learning_rate": 0.0015, "loss": 1.5726, "step": 2790},
    {"epoch": 0.29535864978902954, "grad_norm": 0.7406479120254517, "learning_rate": 0.0015, "loss": 1.5701, "step": 2800},
    {"epoch": 0.29641350210970463, "grad_norm": 0.7669352293014526, "learning_rate": 0.0015, "loss": 1.5706, "step": 2810},
    {"epoch": 0.2974683544303797, "grad_norm": 0.7264388799667358, "learning_rate": 0.0015, "loss": 1.5775, "step": 2820},
    {"epoch": 0.29852320675105487, "grad_norm": 1.3137651681900024, "learning_rate": 0.0015, "loss": 1.5563, "step": 2830},
    {"epoch": 0.29957805907172996, "grad_norm": 0.7707687616348267, "learning_rate": 0.0015, "loss": 1.5596, "step": 2840},
    {"epoch": 0.30063291139240506, "grad_norm": 0.674060046672821, "learning_rate": 0.0015, "loss": 1.5593, "step": 2850},
    {"epoch": 0.30168776371308015, "grad_norm": 0.6680307388305664, "learning_rate": 0.0015, "loss": 1.5656, "step": 2860},
    {"epoch": 0.3027426160337553, "grad_norm": 0.7418972849845886, "learning_rate": 0.0015, "loss": 1.5651, "step": 2870},
    {"epoch": 0.3037974683544304, "grad_norm": 0.638434648513794, "learning_rate": 0.0015, "loss": 1.5542, "step": 2880},
    {"epoch": 0.3048523206751055, "grad_norm": 0.6422763466835022, "learning_rate": 0.0015, "loss": 1.5595, "step": 2890},
    {"epoch": 0.3059071729957806, "grad_norm": 0.7120733857154846, "learning_rate": 0.0015, "loss": 1.5635, "step": 2900},
    {"epoch": 0.3069620253164557, "grad_norm": 0.7064751386642456, "learning_rate": 0.0015, "loss": 1.5665, "step": 2910},
    {"epoch": 0.3080168776371308, "grad_norm": 0.6330904364585876, "learning_rate": 0.0015, "loss": 1.5613, "step": 2920},
    {"epoch": 0.3090717299578059, "grad_norm": 0.6634082794189453, "learning_rate": 0.0015, "loss": 1.5545, "step": 2930},
    {"epoch": 0.310126582278481, "grad_norm": 0.6552626490592957, "learning_rate": 0.0015, "loss": 1.5622, "step": 2940},
    {"epoch": 0.3111814345991561, "grad_norm": 0.8915601968765259, "learning_rate": 0.0015, "loss": 1.5715, "step": 2950},
    {"epoch": 0.31223628691983124, "grad_norm": 0.7891660332679749, "learning_rate": 0.0015, "loss": 1.5549, "step": 2960},
    {"epoch": 0.31329113924050633, "grad_norm": 1.112757921218872, "learning_rate": 0.0015, "loss": 1.5492, "step": 2970},
    {"epoch": 0.3143459915611814, "grad_norm": 0.5727996826171875, "learning_rate": 0.0015, "loss": 1.5524, "step": 2980},
    {"epoch": 0.3154008438818565, "grad_norm": 0.634543240070343, "learning_rate": 0.0015, "loss": 1.5638, "step": 2990},
    {"epoch": 0.31645569620253167, "grad_norm": 0.6999568939208984, "learning_rate": 0.0015, "loss": 1.5467, "step": 3000},
    {"epoch": 0.31751054852320676, "grad_norm": 0.7002508640289307, "learning_rate": 0.0015, "loss": 1.5568, "step": 3010},
    {"epoch": 0.31856540084388185, "grad_norm": 0.7466462254524231, "learning_rate": 0.0015, "loss": 1.5604, "step": 3020},
    {"epoch": 0.31962025316455694, "grad_norm": 0.6902084350585938, "learning_rate": 0.0015, "loss": 1.5441, "step": 3030},
    {"epoch": 0.3206751054852321, "grad_norm": 0.7047731876373291, "learning_rate": 0.0015, "loss": 1.5516, "step": 3040},
    {"epoch": 0.3217299578059072, "grad_norm": 0.7328782677650452, "learning_rate": 0.0015, "loss": 1.5474, "step": 3050},
    {"epoch": 0.3227848101265823, "grad_norm": 0.6541309952735901, "learning_rate": 0.0015, "loss": 1.5603, "step": 3060},
    {"epoch": 0.32383966244725737, "grad_norm": 0.6323668360710144, "learning_rate": 0.0015, "loss": 1.5463, "step": 3070},
    {"epoch": 0.32489451476793246, "grad_norm": 0.6534177660942078, "learning_rate": 0.0015, "loss": 1.5422, "step": 3080},
    {"epoch": 0.3259493670886076, "grad_norm": 0.5820841193199158, "learning_rate": 0.0015, "loss": 1.5568, "step": 3090},
    {"epoch": 0.3270042194092827, "grad_norm": 0.6260690689086914, "learning_rate": 0.0015, "loss": 1.541, "step": 3100},
    {"epoch": 0.3280590717299578, "grad_norm": 0.6181133389472961, "learning_rate": 0.0015, "loss": 1.545, "step": 3110},
    {"epoch": 0.3291139240506329, "grad_norm": 0.6371879577636719, "learning_rate": 0.0015, "loss": 1.5482, "step": 3120},
    {"epoch": 0.33016877637130804, "grad_norm": 0.7137547135353088, "learning_rate": 0.0015, "loss": 1.5609, "step": 3130},
    {"epoch": 0.33122362869198313, "grad_norm": 0.7851402759552002, "learning_rate": 0.0015, "loss": 1.536, "step": 3140},
    {"epoch": 0.3322784810126582, "grad_norm": 0.636054277420044, "learning_rate": 0.0015, "loss": 1.5477, "step": 3150},
    {"epoch": 0.3333333333333333, "grad_norm": 0.6029794812202454, "learning_rate": 0.0015, "loss": 1.546, "step": 3160},
    {"epoch": 0.33438818565400846, "grad_norm": 0.7253355383872986, "learning_rate": 0.0015, "loss": 1.5516, "step": 3170},
    {"epoch": 0.33544303797468356, "grad_norm": 0.676753580570221, "learning_rate": 0.0015, "loss": 1.544, "step": 3180},
    {"epoch": 0.33649789029535865, "grad_norm": 0.9133493900299072, "learning_rate": 0.0015, "loss": 1.547, "step": 3190},
    {"epoch": 0.33755274261603374, "grad_norm": 0.7947659492492676, "learning_rate": 0.0015, "loss": 1.5414, "step": 3200},
    {"epoch": 0.33860759493670883, "grad_norm": 0.7425976991653442, "learning_rate": 0.0015, "loss": 1.5499, "step": 3210},
    {"epoch": 0.339662447257384, "grad_norm": 0.7035290002822876, "learning_rate": 0.0015, "loss": 1.5433, "step": 3220},
    {"epoch": 0.3407172995780591, "grad_norm": 0.7503587007522583, "learning_rate": 0.0015, "loss": 1.5366, "step": 3230},
    {"epoch": 0.34177215189873417, "grad_norm": 0.6289841532707214, "learning_rate": 0.0015, "loss": 1.5174, "step": 3240},
    {"epoch": 0.34282700421940926, "grad_norm": 0.6338568925857544, "learning_rate": 0.0015, "loss": 1.5426, "step": 3250},
    {"epoch": 0.3438818565400844, "grad_norm": 0.6188430190086365, "learning_rate": 0.0015, "loss": 1.5321, "step": 3260},
    {"epoch": 0.3449367088607595, "grad_norm": 0.7039831876754761, "learning_rate": 0.0015, "loss": 1.5482, "step": 3270},
    {"epoch": 0.3459915611814346, "grad_norm": 0.869328498840332, "learning_rate": 0.0015, "loss": 1.547, "step": 3280},
    {"epoch": 0.3470464135021097, "grad_norm": 0.8635297417640686, "learning_rate": 0.0015, "loss": 1.5339, "step": 3290},
    {"epoch": 0.34810126582278483, "grad_norm": 0.6145782470703125, "learning_rate": 0.0015, "loss": 1.541, "step": 3300},
    {"epoch": 0.3491561181434599, "grad_norm": 0.5749341249465942, "learning_rate": 0.0015, "loss": 1.5391, "step": 3310},
    {"epoch": 0.350210970464135, "grad_norm": 0.6416504383087158, "learning_rate": 0.0015, "loss": 1.5426, "step": 3320},
    {"epoch": 0.3512658227848101, "grad_norm": 0.6237147450447083, "learning_rate": 0.0015, "loss": 1.5337, "step": 3330},
    {"epoch": 0.35232067510548526, "grad_norm": 0.667945921421051, "learning_rate": 0.0015, "loss": 1.5364, "step": 3340},
    {"epoch": 0.35337552742616035, "grad_norm": 0.6189672350883484, "learning_rate": 0.0015, "loss": 1.5291, "step": 3350},
    {"epoch": 0.35443037974683544, "grad_norm": 0.999944269657135, "learning_rate": 0.0015, "loss": 1.5309, "step": 3360},
    {"epoch": 0.35548523206751054, "grad_norm": 0.7216845750808716, "learning_rate": 0.0015, "loss": 1.5398, "step": 3370},
    {"epoch": 0.35654008438818563, "grad_norm": 0.8774672150611877, "learning_rate": 0.0015, "loss": 1.5336, "step": 3380},
    {"epoch": 0.3575949367088608, "grad_norm": 0.581254243850708, "learning_rate": 0.0015, "loss": 1.526, "step": 3390},
    {"epoch": 0.35864978902953587, "grad_norm": 0.7107481360435486, "learning_rate": 0.0015, "loss": 1.5338, "step": 3400},
    {"epoch": 0.35970464135021096, "grad_norm": 0.6095128059387207, "learning_rate": 0.0015, "loss": 1.5324, "step": 3410},
    {"epoch": 0.36075949367088606, "grad_norm": 0.7119369506835938, "learning_rate": 0.0015, "loss": 1.5123, "step": 3420},
    {"epoch": 0.3618143459915612, "grad_norm": 0.6254273653030396, "learning_rate": 0.0015, "loss": 1.5314, "step": 3430},
    {"epoch": 0.3628691983122363, "grad_norm": 0.6041594743728638, "learning_rate": 0.0015, "loss": 1.526, "step": 3440},
    {"epoch": 0.3639240506329114, "grad_norm": 0.6356289982795715, "learning_rate": 0.0015, "loss": 1.5249, "step": 3450},
    {"epoch": 0.3649789029535865, "grad_norm": 0.648369312286377, "learning_rate": 0.0015, "loss": 1.5175, "step": 3460},
    {"epoch": 0.36603375527426163, "grad_norm": 0.6955825090408325, "learning_rate": 0.0015, "loss": 1.534, "step": 3470},
    {"epoch": 0.3670886075949367, "grad_norm": 0.7062328457832336, "learning_rate": 0.0015, "loss": 1.5402, "step": 3480},
    {"epoch": 0.3681434599156118, "grad_norm": 0.6580941081047058, "learning_rate": 0.0015, "loss": 1.5321, "step": 3490},
    {"epoch": 0.3691983122362869, "grad_norm": 0.7443981170654297, "learning_rate": 0.0015, "loss": 1.5156, "step": 3500},
    {"epoch": 0.370253164556962, "grad_norm": 0.683188259601593, "learning_rate": 0.0015, "loss": 1.5344, "step": 3510},
    {"epoch": 0.37130801687763715, "grad_norm": 0.602588415145874, "learning_rate": 0.0015, "loss": 1.5287, "step": 3520},
    {"epoch": 0.37236286919831224, "grad_norm": 0.669110894203186, "learning_rate": 0.0015, "loss": 1.5363, "step": 3530},
    {"epoch": 0.37341772151898733, "grad_norm": 0.9849790930747986, "learning_rate": 0.0015, "loss": 1.5338, "step": 3540},
    {"epoch": 0.3744725738396624, "grad_norm": 0.7312303781509399, "learning_rate": 0.0015, "loss": 1.5244, "step": 3550},
    {"epoch": 0.3755274261603376, "grad_norm": 0.6108510494232178, "learning_rate": 0.0015, "loss": 1.5204, "step": 3560},
    {"epoch": 0.37658227848101267, "grad_norm": 0.9470191597938538, "learning_rate": 0.0015, "loss": 1.5208, "step": 3570},
    {"epoch": 0.37763713080168776, "grad_norm": 0.6268520951271057, "learning_rate": 0.0015, "loss": 1.5218, "step": 3580},
    {"epoch": 0.37869198312236285, "grad_norm": 0.6916161179542542, "learning_rate": 0.0015, "loss": 1.5334, "step": 3590},
    {"epoch": 0.379746835443038, "grad_norm": 0.948076069355011, "learning_rate": 0.0015, "loss": 1.5156, "step": 3600},
    {"epoch": 0.3808016877637131, "grad_norm": 0.6372155547142029, "learning_rate": 0.0015, "loss": 1.5174, "step": 3610},
    {"epoch": 0.3818565400843882, "grad_norm": 0.7083125114440918, "learning_rate": 0.0015, "loss": 1.5203, "step": 3620},
    {"epoch": 0.3829113924050633, "grad_norm": 0.5928717851638794, "learning_rate": 0.0015, "loss": 1.5198, "step": 3630},
    {"epoch": 0.38396624472573837, "grad_norm": 0.8729598522186279, "learning_rate": 0.0015, "loss": 1.5204, "step": 3640},
    {"epoch": 0.3850210970464135, "grad_norm": 0.7349838614463806, "learning_rate": 0.0015, "loss": 1.5304, "step": 3650},
    {"epoch": 0.3860759493670886, "grad_norm": 0.776211678981781, "learning_rate": 0.0015, "loss": 1.5231, "step": 3660},
    {"epoch": 0.3871308016877637, "grad_norm": 0.6093047857284546, "learning_rate": 0.0015, "loss": 1.5146, "step": 3670},
    {"epoch": 0.3881856540084388, "grad_norm": 0.7260333895683289, "learning_rate": 0.0015, "loss": 1.5148, "step": 3680},
    {"epoch": 0.38924050632911394, "grad_norm": 0.6650367975234985, "learning_rate": 0.0015, "loss": 1.5251, "step": 3690},
    {"epoch": 0.39029535864978904, "grad_norm": 0.6198450922966003, "learning_rate": 0.0015, "loss": 1.5283, "step": 3700},
    {"epoch": 0.39135021097046413, "grad_norm": 0.6064160466194153, "learning_rate": 0.0015, "loss": 1.5159, "step": 3710},
    {"epoch": 0.3924050632911392, "grad_norm": 0.7401567697525024, "learning_rate": 0.0015, "loss": 1.4973, "step": 3720},
    {"epoch": 0.39345991561181437, "grad_norm": 0.7242253422737122, "learning_rate": 0.0015, "loss": 1.5159, "step": 3730},
    {"epoch": 0.39451476793248946, "grad_norm": 0.6363670229911804, "learning_rate": 0.0015, "loss": 1.5165, "step": 3740},
    {"epoch": 0.39556962025316456, "grad_norm": 0.7001657485961914, "learning_rate": 0.0015, "loss": 1.5206, "step": 3750},
    {"epoch": 0.39662447257383965, "grad_norm": 0.9348625540733337, "learning_rate": 0.0015, "loss": 1.5117, "step": 3760},
    {"epoch": 0.39767932489451474, "grad_norm": 0.5924307107925415, "learning_rate": 0.0015, "loss": 1.5246, "step": 3770},
    {"epoch": 0.3987341772151899, "grad_norm": 0.5479297637939453, "learning_rate": 0.0015, "loss": 1.5111, "step": 3780},
    {"epoch": 0.399789029535865, "grad_norm": 0.7290306091308594, "learning_rate": 0.0015, "loss": 1.5161, "step": 3790},
    {"epoch": 0.4008438818565401, "grad_norm": 0.6529408097267151, "learning_rate": 0.0015, "loss": 1.5201, "step": 3800},
    {"epoch": 0.40189873417721517, "grad_norm": 0.6077266335487366, "learning_rate": 0.0015, "loss": 1.5095, "step": 3810},
    {"epoch": 0.4029535864978903, "grad_norm": 0.620369553565979, "learning_rate": 0.0015, "loss": 1.5027, "step": 3820},
    {"epoch": 0.4040084388185654, "grad_norm": 0.6094712615013123, "learning_rate": 0.0015, "loss": 1.5033, "step": 3830},
    {"epoch": 0.4050632911392405, "grad_norm": 0.6363013386726379, "learning_rate": 0.0015, "loss": 1.521, "step": 3840},
    {"epoch": 0.4061181434599156, "grad_norm": 0.7066325545310974, "learning_rate": 0.0015, "loss": 1.5051, "step": 3850},
    {"epoch": 0.40717299578059074, "grad_norm": 1.0054547786712646, "learning_rate": 0.0015, "loss": 1.5167, "step": 3860},
    {"epoch": 0.40822784810126583, "grad_norm": 0.7523656487464905, "learning_rate": 0.0015, "loss": 1.5076, "step": 3870},
    {"epoch": 0.4092827004219409, "grad_norm": 0.6852942705154419, "learning_rate": 0.0015, "loss": 1.5018, "step": 3880},
    {"epoch": 0.410337552742616, "grad_norm": 0.6333704590797424, "learning_rate": 0.0015, "loss": 1.4975, "step": 3890},
    {"epoch": 0.41139240506329117, "grad_norm": 1.0130631923675537, "learning_rate": 0.0015, "loss": 1.4979, "step": 3900},
    {"epoch": 0.41244725738396626, "grad_norm": 0.6275448203086853, "learning_rate": 0.0015, "loss": 1.5046, "step": 3910},
    {"epoch": 0.41350210970464135, "grad_norm": 0.5929350256919861, "learning_rate": 0.0015, "loss": 1.5006, "step": 3920},
    {"epoch": 0.41455696202531644, "grad_norm": 0.730098307132721, "learning_rate": 0.0015, "loss": 1.5065, "step": 3930},
    {"epoch": 0.41561181434599154, "grad_norm": 0.6206305027008057, "learning_rate": 0.0015, "loss": 1.4993, "step": 3940},
    {"epoch": 0.4166666666666667, "grad_norm": 0.7433390021324158, "learning_rate": 0.0015, "loss": 1.503, "step": 3950},
    {"epoch": 0.4177215189873418, "grad_norm": 0.5319532155990601, "learning_rate": 0.0015, "loss": 1.5053, "step": 3960},
    {"epoch": 0.41877637130801687, "grad_norm": 0.7162283062934875, "learning_rate": 0.0015, "loss": 1.497, "step": 3970},
    {"epoch": 0.41983122362869196, "grad_norm": 0.6116654276847839, "learning_rate": 0.0015, "loss": 1.5076, "step": 3980},
    {"epoch": 0.4208860759493671, "grad_norm": 0.6190109252929688, "learning_rate": 0.0015, "loss": 1.4991, "step": 3990},
    {"epoch": 0.4219409282700422, "grad_norm": 0.6667494773864746, "learning_rate": 0.0015, "loss": 1.5121, "step": 4000},
    {"epoch": 0.4229957805907173, "grad_norm": 0.5939257740974426, "learning_rate": 0.0015, "loss": 1.4995, "step": 4010},
    {"epoch": 0.4240506329113924, "grad_norm": 0.6996036767959595, "learning_rate": 0.0015, "loss": 1.5032, "step": 4020},
    {"epoch": 0.42510548523206754, "grad_norm": 0.5522063374519348, "learning_rate": 0.0015,
|
"loss": 1.4918, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.42616033755274263, |
|
"grad_norm": 0.6450867056846619, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5028, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4272151898734177, |
|
"grad_norm": 0.5899419784545898, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5052, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.4282700421940928, |
|
"grad_norm": 1.1095937490463257, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5082, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.4293248945147679, |
|
"grad_norm": 0.798089325428009, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5045, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.43037974683544306, |
|
"grad_norm": 0.6162708401679993, |
|
"learning_rate": 0.0015, |
|
"loss": 1.509, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.43143459915611815, |
|
"grad_norm": 1.1070821285247803, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5054, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.43248945147679324, |
|
"grad_norm": 0.5930264592170715, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5009, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.43354430379746833, |
|
"grad_norm": 0.6469377279281616, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4911, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.4345991561181435, |
|
"grad_norm": 0.9273725152015686, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4969, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.4356540084388186, |
|
"grad_norm": 0.6360483765602112, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4906, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.43670886075949367, |
|
"grad_norm": 0.720285177230835, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4938, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.43776371308016876, |
|
"grad_norm": 0.6374127268791199, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4995, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4388185654008439, |
|
"grad_norm": 0.6002802848815918, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4921, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.439873417721519, |
|
"grad_norm": 0.6103620529174805, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4926, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.4409282700421941, |
|
"grad_norm": 0.6310016512870789, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4883, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4419831223628692, |
|
"grad_norm": 0.7795982360839844, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4956, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.4430379746835443, |
|
"grad_norm": 0.7573109865188599, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4994, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.4440928270042194, |
|
"grad_norm": 0.8623130917549133, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4871, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.4451476793248945, |
|
"grad_norm": 0.6567066311836243, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4971, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.4462025316455696, |
|
"grad_norm": 0.8647184371948242, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4927, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.4472573839662447, |
|
"grad_norm": 0.7369650602340698, |
|
"learning_rate": 0.0015, |
|
"loss": 1.487, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.44831223628691985, |
|
"grad_norm": 0.5668179392814636, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4929, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.44936708860759494, |
|
"grad_norm": 0.7031323909759521, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5007, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.45042194092827004, |
|
"grad_norm": 0.5766967535018921, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4901, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.45147679324894513, |
|
"grad_norm": 0.826673686504364, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4841, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.4525316455696203, |
|
"grad_norm": 1.1313567161560059, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4965, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.45358649789029537, |
|
"grad_norm": 0.8283265233039856, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4943, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.45464135021097046, |
|
"grad_norm": 0.5803491473197937, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4915, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.45569620253164556, |
|
"grad_norm": 0.7237555384635925, |
|
"learning_rate": 0.0015, |
|
"loss": 1.487, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.45675105485232065, |
|
"grad_norm": 0.6318506002426147, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4942, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.4578059071729958, |
|
"grad_norm": 0.6532315015792847, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4865, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.4588607594936709, |
|
"grad_norm": 0.7136020064353943, |
|
"learning_rate": 0.0015, |
|
"loss": 1.487, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.459915611814346, |
|
"grad_norm": 0.617569625377655, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5038, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.4609704641350211, |
|
"grad_norm": 0.6590544581413269, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4819, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.4620253164556962, |
|
"grad_norm": 0.5890007019042969, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4861, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.4630801687763713, |
|
"grad_norm": 0.6877003312110901, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4814, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.4641350210970464, |
|
"grad_norm": 0.6379115581512451, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4887, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.4651898734177215, |
|
"grad_norm": 0.6963144540786743, |
|
"learning_rate": 0.0015, |
|
"loss": 1.5052, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.46624472573839665, |
|
"grad_norm": 0.5845509171485901, |
|
"learning_rate": 0.0015, |
|
"loss": 1.488, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.46729957805907174, |
|
"grad_norm": 0.6566439867019653, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4905, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.46835443037974683, |
|
"grad_norm": 0.7040950059890747, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4852, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.4694092827004219, |
|
"grad_norm": 0.6772409081459045, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4914, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.4704641350210971, |
|
"grad_norm": 0.6106696724891663, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4767, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.47151898734177217, |
|
"grad_norm": 0.5537508130073547, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4899, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.47257383966244726, |
|
"grad_norm": 0.6199619770050049, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4936, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.47362869198312235, |
|
"grad_norm": 0.5953665375709534, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4824, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.47468354430379744, |
|
"grad_norm": 0.6078841686248779, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4793, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.4757383966244726, |
|
"grad_norm": 0.581908106803894, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4864, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.4767932489451477, |
|
"grad_norm": 0.594734251499176, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4692, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.4778481012658228, |
|
"grad_norm": 0.5448296666145325, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4895, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.47890295358649787, |
|
"grad_norm": 0.5612491369247437, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4897, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.479957805907173, |
|
"grad_norm": 0.8511930108070374, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4883, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.4810126582278481, |
|
"grad_norm": 0.5904642939567566, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4751, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.4820675105485232, |
|
"grad_norm": 0.6498848795890808, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4604, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.4831223628691983, |
|
"grad_norm": 0.7043668627738953, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4833, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.48417721518987344, |
|
"grad_norm": 0.6435434222221375, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4686, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.48523206751054854, |
|
"grad_norm": 0.5815362334251404, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4883, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.48628691983122363, |
|
"grad_norm": 1.1824867725372314, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4933, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.4873417721518987, |
|
"grad_norm": 0.6740747690200806, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4695, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.4883966244725738, |
|
"grad_norm": 0.5841023325920105, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4855, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.48945147679324896, |
|
"grad_norm": 0.5732386112213135, |
|
"learning_rate": 0.0015, |
|
"loss": 1.486, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.49050632911392406, |
|
"grad_norm": 0.6099458336830139, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4881, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.49156118143459915, |
|
"grad_norm": 0.6234198212623596, |
|
"learning_rate": 0.0015, |
|
"loss": 1.473, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.49261603375527424, |
|
"grad_norm": 0.718690812587738, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4754, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.4936708860759494, |
|
"grad_norm": 0.626200258731842, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4715, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.4947257383966245, |
|
"grad_norm": 0.6853267550468445, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4684, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.4957805907172996, |
|
"grad_norm": 0.5991837978363037, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4787, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.49683544303797467, |
|
"grad_norm": 0.584218442440033, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4807, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.4978902953586498, |
|
"grad_norm": 0.8109069466590881, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4765, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.4989451476793249, |
|
"grad_norm": 0.7000900506973267, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4744, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.6348422169685364, |
|
"learning_rate": 0.0015, |
|
"loss": 1.471, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.5010548523206751, |
|
"grad_norm": 0.7023735046386719, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4909, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.5021097046413502, |
|
"grad_norm": 0.5916067361831665, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4749, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.5031645569620253, |
|
"grad_norm": 0.5802889466285706, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4754, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.5042194092827004, |
|
"grad_norm": 0.5390263795852661, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4852, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.5052742616033755, |
|
"grad_norm": 0.6481983661651611, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4671, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.5063291139240507, |
|
"grad_norm": 0.5872080326080322, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4736, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.5073839662447257, |
|
"grad_norm": 0.604631781578064, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4734, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.5084388185654009, |
|
"grad_norm": 0.5989955067634583, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4908, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.509493670886076, |
|
"grad_norm": 0.5893116593360901, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4745, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.510548523206751, |
|
"grad_norm": 0.6147662401199341, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4668, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.5116033755274262, |
|
"grad_norm": 0.7372254729270935, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4799, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.5126582278481012, |
|
"grad_norm": 0.7213212847709656, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4684, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.5137130801687764, |
|
"grad_norm": 0.6522668600082397, |
|
"learning_rate": 0.0015, |
|
"loss": 1.461, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.5147679324894515, |
|
"grad_norm": 0.6512165069580078, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4843, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.5158227848101266, |
|
"grad_norm": 0.6414441466331482, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4746, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.5168776371308017, |
|
"grad_norm": 0.6639628410339355, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4824, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5179324894514767, |
|
"grad_norm": 0.6600837707519531, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4751, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.5189873417721519, |
|
"grad_norm": 0.5745564699172974, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4645, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.520042194092827, |
|
"grad_norm": 0.6705775260925293, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4746, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.5210970464135021, |
|
"grad_norm": 0.6552022695541382, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4714, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.5221518987341772, |
|
"grad_norm": 0.7936440706253052, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4797, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.5232067510548524, |
|
"grad_norm": 0.8443682789802551, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4624, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.5242616033755274, |
|
"grad_norm": 0.5906997919082642, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4693, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.5253164556962026, |
|
"grad_norm": 0.8045083284378052, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4649, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.5263713080168776, |
|
"grad_norm": 0.6744979619979858, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4655, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.5274261603375527, |
|
"grad_norm": 0.5991400480270386, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4783, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5284810126582279, |
|
"grad_norm": 0.6100850701332092, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4692, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.5295358649789029, |
|
"grad_norm": 0.6038932204246521, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4642, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.5305907172995781, |
|
"grad_norm": 0.5445556044578552, |
|
"learning_rate": 0.0015, |
|
"loss": 1.45, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.5316455696202531, |
|
"grad_norm": 0.57991623878479, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4777, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.5327004219409283, |
|
"grad_norm": 0.5831894874572754, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4645, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.5337552742616034, |
|
"grad_norm": 0.6404194831848145, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4759, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.5348101265822784, |
|
"grad_norm": 0.9784829616546631, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4649, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.5358649789029536, |
|
"grad_norm": 0.6670748591423035, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4806, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.5369198312236287, |
|
"grad_norm": 0.6233241558074951, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4695, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.5379746835443038, |
|
"grad_norm": 0.574617862701416, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4522, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.5390295358649789, |
|
"grad_norm": 0.6161243319511414, |
|
"learning_rate": 0.0015, |
|
"loss": 1.466, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.540084388185654, |
|
"grad_norm": 0.568108320236206, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4642, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.5411392405063291, |
|
"grad_norm": 0.7579213380813599, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4634, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.5421940928270043, |
|
"grad_norm": 0.9048457741737366, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4753, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.5432489451476793, |
|
"grad_norm": 0.6220145225524902, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4716, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.5443037974683544, |
|
"grad_norm": 0.681603193283081, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4663, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.5453586497890295, |
|
"grad_norm": 0.5756487250328064, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4663, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.5464135021097046, |
|
"grad_norm": 1.1531461477279663, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4779, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.5474683544303798, |
|
"grad_norm": 0.9628408551216125, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4595, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.5485232067510548, |
|
"grad_norm": 0.9518396258354187, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4728, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.54957805907173, |
|
"grad_norm": 0.6302949786186218, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4565, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.5506329113924051, |
|
"grad_norm": 0.6904149055480957, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4664, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.5516877637130801, |
|
"grad_norm": 0.749798059463501, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4591, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.5527426160337553, |
|
"grad_norm": 0.7355242967605591, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4668, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.5537974683544303, |
|
"grad_norm": 0.6457265019416809, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4608, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.5548523206751055, |
|
"grad_norm": 0.6021826267242432, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4623, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.5559071729957806, |
|
"grad_norm": 0.5744286179542542, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4725, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.5569620253164557, |
|
"grad_norm": 0.6601939797401428, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4617, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.5580168776371308, |
|
"grad_norm": 0.6193671822547913, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4621, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.5590717299578059, |
|
"grad_norm": 0.571906566619873, |
|
"learning_rate": 0.0015, |
|
"loss": 1.467, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.560126582278481, |
|
"grad_norm": 0.7392724752426147, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4524, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.5611814345991561, |
|
"grad_norm": 0.6911517381668091, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4545, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.5622362869198312, |
|
"grad_norm": 0.6782168745994568, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4654, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.5632911392405063, |
|
"grad_norm": 0.5899258255958557, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4578, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.5643459915611815, |
|
"grad_norm": 0.5885708332061768, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4513, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.5654008438818565, |
|
"grad_norm": 0.8930160403251648, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4609, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.5664556962025317, |
|
"grad_norm": 0.6113539338111877, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4618, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.5675105485232067, |
|
"grad_norm": 0.669581949710846, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4553, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.5685654008438819, |
|
"grad_norm": 0.6713536977767944, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4544, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.569620253164557, |
|
"grad_norm": 0.5978567600250244, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4546, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.570675105485232, |
|
"grad_norm": 0.5797361135482788, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4573, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.5717299578059072, |
|
"grad_norm": 0.6175971031188965, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4535, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.5727848101265823, |
|
"grad_norm": 0.6148843169212341, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4503, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.5738396624472574, |
|
"grad_norm": 0.9311176538467407, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4473, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.5748945147679325, |
|
"grad_norm": 0.7705535888671875, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4575, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.5759493670886076, |
|
"grad_norm": 0.8579835295677185, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4583, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.5770042194092827, |
|
"grad_norm": 0.6973420977592468, |
|
"learning_rate": 0.0015, |
|
"loss": 1.456, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.5780590717299579, |
|
"grad_norm": 0.7770690321922302, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4589, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.5791139240506329, |
|
"grad_norm": 0.554744303226471, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4625, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.580168776371308, |
|
"grad_norm": 0.6639339327812195, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4529, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.5812236286919831, |
|
"grad_norm": 0.6472654938697815, |
|
"learning_rate": 0.0015, |
|
"loss": 1.462, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.5822784810126582, |
|
"grad_norm": 0.6235995888710022, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4517, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.5833333333333334, |
|
"grad_norm": 0.599632740020752, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4477, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.5843881856540084, |
|
"grad_norm": 0.709097683429718, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4439, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.5854430379746836, |
|
"grad_norm": 0.6987321972846985, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4604, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.5864978902953587, |
|
"grad_norm": 0.6053097248077393, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4668, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.5875527426160337, |
|
"grad_norm": 0.5671143531799316, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4516, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.5886075949367089, |
|
"grad_norm": 0.7676701545715332, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4496, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.5896624472573839, |
|
"grad_norm": 0.6224042773246765, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4566, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.5907172995780591, |
|
"grad_norm": 0.5750153660774231, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4503, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.5917721518987342, |
|
"grad_norm": 0.552209734916687, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4529, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.5928270042194093, |
|
"grad_norm": 0.5752233266830444, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4632, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.5938818565400844, |
|
"grad_norm": 0.8984862565994263, |
|
"learning_rate": 0.0015, |
|
"loss": 1.456, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.5949367088607594, |
|
"grad_norm": 0.5493113994598389, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4646, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.5959915611814346, |
|
"grad_norm": 0.6208711862564087, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4606, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.5970464135021097, |
|
"grad_norm": 0.7052538394927979, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4524, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.5981012658227848, |
|
"grad_norm": 0.5871936082839966, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4602, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.5991561181434599, |
|
"grad_norm": 0.6055148839950562, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4607, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.6002109704641351, |
|
"grad_norm": 0.8056248426437378, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4431, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.6012658227848101, |
|
"grad_norm": 0.6749975681304932, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4484, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.6023206751054853, |
|
"grad_norm": 0.6684216260910034, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4514, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.6033755274261603, |
|
"grad_norm": 0.571518063545227, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4551, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.6044303797468354, |
|
"grad_norm": 0.6335450410842896, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4532, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.6054852320675106, |
|
"grad_norm": 0.6443816423416138, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4498, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.6065400843881856, |
|
"grad_norm": 0.6893118619918823, |
|
"learning_rate": 0.0015, |
|
"loss": 1.426, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.6075949367088608, |
|
"grad_norm": 0.6394256949424744, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4362, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.6086497890295358, |
|
"grad_norm": 0.7784489989280701, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4374, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.609704641350211, |
|
"grad_norm": 0.5581979751586914, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4459, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.6107594936708861, |
|
"grad_norm": 0.6324455142021179, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4438, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.6118143459915611, |
|
"grad_norm": 0.5833821296691895, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4466, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.6128691983122363, |
|
"grad_norm": 0.5692071914672852, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4517, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.6139240506329114, |
|
"grad_norm": 0.5617991089820862, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4653, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.6149789029535865, |
|
"grad_norm": 0.5704951286315918, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4463, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.6160337552742616, |
|
"grad_norm": 0.655717670917511, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4578, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.6170886075949367, |
|
"grad_norm": 0.7156020998954773, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4613, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.6181434599156118, |
|
"grad_norm": 0.6333968043327332, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4486, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.619198312236287, |
|
"grad_norm": 0.5942566394805908, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4431, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.620253164556962, |
|
"grad_norm": 0.576947808265686, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4536, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.6213080168776371, |
|
"grad_norm": 0.5468796491622925, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4464, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.6223628691983122, |
|
"grad_norm": 0.5699398517608643, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4447, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.6234177215189873, |
|
"grad_norm": 0.7965925335884094, |
|
"learning_rate": 0.0015, |
|
"loss": 1.44, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.6244725738396625, |
|
"grad_norm": 0.5579686164855957, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4365, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.6255274261603375, |
|
"grad_norm": 1.3399968147277832, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4457, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.6265822784810127, |
|
"grad_norm": 0.7462083101272583, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4512, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.6276371308016878, |
|
"grad_norm": 0.5861807465553284, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4385, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.6286919831223629, |
|
"grad_norm": 0.6418489217758179, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4457, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.629746835443038, |
|
"grad_norm": 0.556674599647522, |
|
"learning_rate": 0.0015, |
|
"loss": 1.445, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.630801687763713, |
|
"grad_norm": 0.6408023834228516, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4308, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.6318565400843882, |
|
"grad_norm": 0.6888174414634705, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4525, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.6329113924050633, |
|
"grad_norm": 0.6021018624305725, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4571, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.6339662447257384, |
|
"grad_norm": 0.5619707703590393, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4435, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.6350210970464135, |
|
"grad_norm": 0.909877598285675, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4468, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.6360759493670886, |
|
"grad_norm": 0.5683539509773254, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4294, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.6371308016877637, |
|
"grad_norm": 0.7239286303520203, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4474, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.6381856540084389, |
|
"grad_norm": 0.6010700464248657, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4298, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.6392405063291139, |
|
"grad_norm": 0.6637555956840515, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4497, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.640295358649789, |
|
"grad_norm": 0.8356103301048279, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4417, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.6413502109704642, |
|
"grad_norm": 0.535122811794281, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4399, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.6424050632911392, |
|
"grad_norm": 0.6084794402122498, |
|
"learning_rate": 0.0015, |
|
"loss": 1.441, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.6434599156118144, |
|
"grad_norm": 0.7110099196434021, |
|
"learning_rate": 0.0015, |
|
"loss": 1.436, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.6445147679324894, |
|
"grad_norm": 0.8643055558204651, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4471, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.6455696202531646, |
|
"grad_norm": 0.6604209542274475, |
|
"learning_rate": 0.0015, |
|
"loss": 1.441, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.6466244725738397, |
|
"grad_norm": 0.7761085033416748, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4397, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.6476793248945147, |
|
"grad_norm": 0.8981795907020569, |
|
"learning_rate": 0.0015, |
|
"loss": 1.441, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.6487341772151899, |
|
"grad_norm": 0.618600606918335, |
|
"learning_rate": 0.0015, |
|
"loss": 1.452, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.6497890295358649, |
|
"grad_norm": 0.6228238344192505, |
|
"learning_rate": 0.0015, |
|
"loss": 1.4449, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.6508438818565401, |
|
"grad_norm": 0.6530385613441467, |
|
"learning_rate": 0.0014854972418331944, |
|
"loss": 1.4227, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.6518987341772152, |
|
"grad_norm": 0.6597455143928528, |
|
"learning_rate": 0.0014650219182191931, |
|
"loss": 1.4444, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.6529535864978903, |
|
"grad_norm": 0.5946850180625916, |
|
"learning_rate": 0.001444828815847542, |
|
"loss": 1.4454, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.6540084388185654, |
|
"grad_norm": 0.5537338852882385, |
|
"learning_rate": 0.0014249140447269945, |
|
"loss": 1.4366, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.6550632911392406, |
|
"grad_norm": 0.5353566408157349, |
|
"learning_rate": 0.0014052737684839257, |
|
"loss": 1.4298, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.6561181434599156, |
|
"grad_norm": 0.6548413634300232, |
|
"learning_rate": 0.0013859042036232954, |
|
"loss": 1.4369, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.6571729957805907, |
|
"grad_norm": 0.6430940628051758, |
|
"learning_rate": 0.001366801618799797, |
|
"loss": 1.4264, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.6582278481012658, |
|
"grad_norm": 0.5876358151435852, |
|
"learning_rate": 0.001347962334099052, |
|
"loss": 1.4287, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.6592827004219409, |
|
"grad_norm": 0.5104976892471313, |
|
"learning_rate": 0.0013293827203287143, |
|
"loss": 1.4349, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.6603375527426161, |
|
"grad_norm": 0.6101750731468201, |
|
"learning_rate": 0.0013110591983193423, |
|
"loss": 1.4252, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.6613924050632911, |
|
"grad_norm": 0.6192573308944702, |
|
"learning_rate": 0.0012929882382349102, |
|
"loss": 1.4322, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.6624472573839663, |
|
"grad_norm": 0.5790297985076904, |
|
"learning_rate": 0.0012751663588928214, |
|
"loss": 1.4305, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.6635021097046413, |
|
"grad_norm": 0.5484774708747864, |
|
"learning_rate": 0.0012575901270932943, |
|
"loss": 1.4249, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.6645569620253164, |
|
"grad_norm": 0.7660226225852966, |
|
"learning_rate": 0.0012402561569579936, |
|
"loss": 1.4179, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.6656118143459916, |
|
"grad_norm": 0.6046931743621826, |
|
"learning_rate": 0.0012231611092777745, |
|
"loss": 1.4163, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.6635903716087341, |
|
"learning_rate": 0.0012063016908694193, |
|
"loss": 1.4123, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.6677215189873418, |
|
"grad_norm": 0.6356135606765747, |
|
"learning_rate": 0.0011896746539412405, |
|
"loss": 1.4192, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.6687763713080169, |
|
"grad_norm": 0.6040505766868591, |
|
"learning_rate": 0.0011732767954674265, |
|
"loss": 1.4101, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.669831223628692, |
|
"grad_norm": 0.5147744417190552, |
|
"learning_rate": 0.0011571049565710122, |
|
"loss": 1.416, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.6708860759493671, |
|
"grad_norm": 0.5723498463630676, |
|
"learning_rate": 0.001141156021915355, |
|
"loss": 1.413, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.6719409282700421, |
|
"grad_norm": 0.6113776564598083, |
|
"learning_rate": 0.001125426919103997, |
|
"loss": 1.3941, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.6729957805907173, |
|
"grad_norm": 0.6992884278297424, |
|
"learning_rate": 0.001109914618088799, |
|
"loss": 1.3988, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.6740506329113924, |
|
"grad_norm": 0.7204107642173767, |
|
"learning_rate": 0.0010946161305862348, |
|
"loss": 1.4129, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.6751054852320675, |
|
"grad_norm": 0.615075945854187, |
|
"learning_rate": 0.001079528509501728, |
|
"loss": 1.4092, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.6761603375527426, |
|
"grad_norm": 0.5387037992477417, |
|
"learning_rate": 0.0010646488483619261, |
|
"loss": 1.4039, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.6772151898734177, |
|
"grad_norm": 0.6567428112030029, |
|
"learning_rate": 0.0010499742807547976, |
|
"loss": 1.391, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.6782700421940928, |
|
"grad_norm": 0.5510596632957458, |
|
"learning_rate": 0.0010355019797774478, |
|
"loss": 1.3992, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.679324894514768, |
|
"grad_norm": 0.5978795886039734, |
|
"learning_rate": 0.001021229157491546, |
|
"loss": 1.3851, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.680379746835443, |
|
"grad_norm": 0.5756472945213318, |
|
"learning_rate": 0.0010071530643862578, |
|
"loss": 1.3883, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.6814345991561181, |
|
"grad_norm": 0.5662719011306763, |
|
"learning_rate": 0.000993270988848579, |
|
"loss": 1.4016, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.6824894514767933, |
|
"grad_norm": 0.7225199341773987, |
|
"learning_rate": 0.0009795802566409742, |
|
"loss": 1.3955, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.6835443037974683, |
|
"grad_norm": 0.7410444021224976, |
|
"learning_rate": 0.0009660782303862109, |
|
"loss": 1.4011, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.6845991561181435, |
|
"grad_norm": 0.5279145240783691, |
|
"learning_rate": 0.0009527623090592963, |
|
"loss": 1.3929, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.6856540084388185, |
|
"grad_norm": 0.528181254863739, |
|
"learning_rate": 0.0009396299274864177, |
|
"loss": 1.405, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.6867088607594937, |
|
"grad_norm": 0.6095656752586365, |
|
"learning_rate": 0.0009266785558507877, |
|
"loss": 1.404, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.6877637130801688, |
|
"grad_norm": 0.5902137160301208, |
|
"learning_rate": 0.0009139056992053016, |
|
"loss": 1.3896, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.6888185654008439, |
|
"grad_norm": 0.7211180329322815, |
|
"learning_rate": 0.000901308896991912, |
|
"loss": 1.3808, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.689873417721519, |
|
"grad_norm": 0.8163548707962036, |
|
"learning_rate": 0.000888885722567627, |
|
"loss": 1.3922, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.6909282700421941, |
|
"grad_norm": 0.6644204258918762, |
|
"learning_rate": 0.0008766337827370438, |
|
"loss": 1.386, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.6919831223628692, |
|
"grad_norm": 0.6455613374710083, |
|
"learning_rate": 0.000864550717291324, |
|
"loss": 1.3859, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.6930379746835443, |
|
"grad_norm": 0.5053851008415222, |
|
"learning_rate": 0.0008526341985535229, |
|
"loss": 1.382, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.6940928270042194, |
|
"grad_norm": 0.5896329879760742, |
|
"learning_rate": 0.0008408819309301891, |
|
"loss": 1.3773, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.6951476793248945, |
|
"grad_norm": 0.6229731440544128, |
|
"learning_rate": 0.0008292916504691397, |
|
"loss": 1.3797, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.6962025316455697, |
|
"grad_norm": 0.5269219875335693, |
|
"learning_rate": 0.0008178611244233354, |
|
"loss": 1.3848, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.6972573839662447, |
|
"grad_norm": 0.5342105627059937, |
|
"learning_rate": 0.0008065881508207637, |
|
"loss": 1.371, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.6983122362869199, |
|
"grad_norm": 0.517015814781189, |
|
"learning_rate": 0.0007954705580402523, |
|
"loss": 1.3796, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.6993670886075949, |
|
"grad_norm": 0.5653436779975891, |
|
"learning_rate": 0.0007845062043931298, |
|
"loss": 1.3728, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.70042194092827, |
|
"grad_norm": 0.6157445907592773, |
|
"learning_rate": 0.0007736929777106497, |
|
"loss": 1.3777, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.7014767932489452, |
|
"grad_norm": 0.6121566891670227, |
|
"learning_rate": 0.000763028794937105, |
|
"loss": 1.3695, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.7025316455696202, |
|
"grad_norm": 0.664849042892456, |
|
"learning_rate": 0.0007525116017285476, |
|
"loss": 1.3687, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.7035864978902954, |
|
"grad_norm": 0.5752630829811096, |
|
"learning_rate": 0.0007421393720570417, |
|
"loss": 1.3722, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.7046413502109705, |
|
"grad_norm": 0.5371103286743164, |
|
"learning_rate": 0.0007319101078203694, |
|
"loss": 1.3717, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.7056962025316456, |
|
"grad_norm": 0.48518502712249756, |
|
"learning_rate": 0.0007218218384571178, |
|
"loss": 1.3626, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.7067510548523207, |
|
"grad_norm": 0.536507785320282, |
|
"learning_rate": 0.0007118726205670703, |
|
"loss": 1.3677, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.7078059071729957, |
|
"grad_norm": 0.5858027338981628, |
|
"learning_rate": 0.0007020605375368316, |
|
"loss": 1.368, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.7088607594936709, |
|
"grad_norm": 0.6124630570411682, |
|
"learning_rate": 0.000692383699170611, |
|
"loss": 1.3582, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.709915611814346, |
|
"grad_norm": 0.5956146121025085, |
|
"learning_rate": 0.0006828402413260966, |
|
"loss": 1.3653, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.7109704641350211, |
|
"grad_norm": 0.5350100994110107, |
|
"learning_rate": 0.0006734283255553471, |
|
"loss": 1.3673, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.7120253164556962, |
|
"grad_norm": 0.5265085101127625, |
|
"learning_rate": 0.0006641461387506347, |
|
"loss": 1.3673, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.7130801687763713, |
|
"grad_norm": 0.6775269508361816, |
|
"learning_rate": 0.0006549918927951678, |
|
"loss": 1.3631, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.7141350210970464, |
|
"grad_norm": 0.5426746010780334, |
|
"learning_rate": 0.0006459638242186297, |
|
"loss": 1.3622, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.7151898734177216, |
|
"grad_norm": 0.7744086384773254, |
|
"learning_rate": 0.0006370601938574639, |
|
"loss": 1.3627, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.7162447257383966, |
|
"grad_norm": 0.5614804625511169, |
|
"learning_rate": 0.0006282792865198421, |
|
"loss": 1.368, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.7172995780590717, |
|
"grad_norm": 0.5236024856567383, |
|
"learning_rate": 0.0006196194106552512, |
|
"loss": 1.3531, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.7183544303797469, |
|
"grad_norm": 0.8842533230781555, |
|
"learning_rate": 0.0006110788980286328, |
|
"loss": 1.3547, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.7194092827004219, |
|
"grad_norm": 0.5572026968002319, |
|
"learning_rate": 0.0006026561033990158, |
|
"loss": 1.3566, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.7204641350210971, |
|
"grad_norm": 0.5783604383468628, |
|
"learning_rate": 0.000594349404202577, |
|
"loss": 1.3535, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.7215189873417721, |
|
"grad_norm": 0.615972101688385, |
|
"learning_rate": 0.0005861572002400716, |
|
"loss": 1.3443, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.7225738396624473, |
|
"grad_norm": 0.5398380160331726, |
|
"learning_rate": 0.0005780779133685717, |
|
"loss": 1.3543, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.7236286919831224, |
|
"grad_norm": 0.5902853012084961, |
|
"learning_rate": 0.0005701099871974524, |
|
"loss": 1.3496, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.7246835443037974, |
|
"grad_norm": 0.5017073154449463, |
|
"learning_rate": 0.0005622518867885708, |
|
"loss": 1.3454, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.7257383966244726, |
|
"grad_norm": 0.6057908535003662, |
|
"learning_rate": 0.0005545020983605748, |
|
"loss": 1.3491, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.7267932489451476, |
|
"grad_norm": 0.6111939549446106, |
|
"learning_rate": 0.0005468591289972898, |
|
"loss": 1.3415, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.7278481012658228, |
|
"grad_norm": 0.6225577592849731, |
|
"learning_rate": 0.0005393215063601232, |
|
"loss": 1.3422, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.7289029535864979, |
|
"grad_norm": 0.5773703455924988, |
|
"learning_rate": 0.0005318877784044343, |
|
"loss": 1.366, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.729957805907173, |
|
"grad_norm": 0.5412142276763916, |
|
"learning_rate": 0.0005245565130998126, |
|
"loss": 1.3439, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.7310126582278481, |
|
"grad_norm": 0.5136180520057678, |
|
"learning_rate": 0.000517326298154212, |
|
"loss": 1.3519, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.7320675105485233, |
|
"grad_norm": 0.6220201253890991, |
|
"learning_rate": 0.0005101957407418877, |
|
"loss": 1.3533, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.7331223628691983, |
|
"grad_norm": 0.5175100564956665, |
|
"learning_rate": 0.0005031634672350829, |
|
"loss": 1.3507, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.7341772151898734, |
|
"grad_norm": 0.5581703186035156, |
|
"learning_rate": 0.0004962281229394129, |
|
"loss": 1.3394, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.7352320675105485, |
|
"grad_norm": 0.5413421988487244, |
|
"learning_rate": 0.0004893883718328983, |
|
"loss": 1.3362, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.7362869198312236, |
|
"grad_norm": 0.5695057511329651, |
|
"learning_rate": 0.0004826428963085938, |
|
"loss": 1.3374, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.7373417721518988, |
|
"grad_norm": 0.5546701550483704, |
|
"learning_rate": 0.00047599039692076457, |
|
"loss": 1.3393, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.7383966244725738, |
|
"grad_norm": 0.6359083652496338, |
|
"learning_rate": 0.0004694295921345622, |
|
"loss": 1.3502, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.739451476793249, |
|
"grad_norm": 0.5230035185813904, |
|
"learning_rate": 0.00046295921807915015, |
|
"loss": 1.3184, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.740506329113924, |
|
"grad_norm": 0.7573515176773071, |
|
"learning_rate": 0.00045657802830423164, |
|
"loss": 1.335, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.7415611814345991, |
|
"grad_norm": 0.5882290601730347, |
|
"learning_rate": 0.00045028479353993473, |
|
"loss": 1.3413, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.7426160337552743, |
|
"grad_norm": 0.6509057879447937, |
|
"learning_rate": 0.00044407830146000587, |
|
"loss": 1.3442, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.7436708860759493, |
|
"grad_norm": 0.6262148022651672, |
|
"learning_rate": 0.0004379573564482676, |
|
"loss": 1.3302, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.7447257383966245, |
|
"grad_norm": 0.6157950758934021, |
|
"learning_rate": 0.0004319207793682963, |
|
"loss": 1.3381, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.7457805907172996, |
|
"grad_norm": 0.609734058380127, |
|
"learning_rate": 0.0004259674073362731, |
|
"loss": 1.3424, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.7468354430379747, |
|
"grad_norm": 0.5529407858848572, |
|
"learning_rate": 0.00042009609349696626, |
|
"loss": 1.3388, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.7478902953586498, |
|
"grad_norm": 0.5082640647888184, |
|
"learning_rate": 0.00041430570680280233, |
|
"loss": 1.3383, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.7489451476793249, |
|
"grad_norm": 0.5488715767860413, |
|
"learning_rate": 0.0004085951317959809, |
|
"loss": 1.3336, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.6439418196678162, |
|
"learning_rate": 0.00040296326839359315, |
|
"loss": 1.3415, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.7510548523206751, |
|
"grad_norm": 0.5768157839775085, |
|
"learning_rate": 0.000397409031675703, |
|
"loss": 1.3344, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.7521097046413502, |
|
"grad_norm": 0.5306650400161743, |
|
"learning_rate": 0.00039193135167634786, |
|
"loss": 1.3451, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.7531645569620253, |
|
"grad_norm": 0.5529879331588745, |
|
"learning_rate": 0.00038652917317742123, |
|
"loss": 1.3256, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.7542194092827004, |
|
"grad_norm": 0.5800896883010864, |
|
"learning_rate": 0.0003812014555053956, |
|
"loss": 1.3233, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.7552742616033755, |
|
"grad_norm": 0.5231634974479675, |
|
"learning_rate": 0.00037594717233084774, |
|
"loss": 1.3324, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.7563291139240507, |
|
"grad_norm": 0.651831328868866, |
|
"learning_rate": 0.0003707653114707471, |
|
"loss": 1.3397, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.7573839662447257, |
|
"grad_norm": 0.5566461682319641, |
|
"learning_rate": 0.00036565487469346906, |
|
"loss": 1.318, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.7584388185654009, |
|
"grad_norm": 0.5578268766403198, |
|
"learning_rate": 0.0003606148775264958, |
|
"loss": 1.3212, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.759493670886076, |
|
"grad_norm": 0.5183534026145935, |
|
"learning_rate": 0.0003556443490667684, |
|
"loss": 1.3266, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.760548523206751, |
|
"grad_norm": 0.570838451385498, |
|
"learning_rate": 0.0003507423317936521, |
|
"loss": 1.3278, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.7616033755274262, |
|
"grad_norm": 0.6385667324066162, |
|
"learning_rate": 0.00034590788138448006, |
|
"loss": 1.3432, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.7626582278481012, |
|
"grad_norm": 0.6211504936218262, |
|
"learning_rate": 0.0003411400665326393, |
|
"loss": 1.3395, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.7637130801687764, |
|
"grad_norm": 0.5946642756462097, |
|
"learning_rate": 0.00033643796876816424, |
|
"loss": 1.3346, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.7647679324894515, |
|
"grad_norm": 0.5230995416641235, |
|
"learning_rate": 0.000331800682280803, |
|
"loss": 1.3279, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.7658227848101266, |
|
"grad_norm": 0.48349809646606445, |
|
"learning_rate": 0.0003272273137455226, |
|
"loss": 1.3241, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.7668776371308017, |
|
"grad_norm": 0.5506454706192017, |
|
"learning_rate": 0.00032271698215041863, |
|
"loss": 1.3225, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.7679324894514767, |
|
"grad_norm": 0.5584042072296143, |
|
"learning_rate": 0.0003182688186269984, |
|
"loss": 1.3249, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.7689873417721519, |
|
"grad_norm": 0.5269228219985962, |
|
"learning_rate": 0.0003138819662828017, |
|
"loss": 1.3336, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.770042194092827, |
|
"grad_norm": 0.5732347965240479, |
|
"learning_rate": 0.00030955558003632966, |
|
"loss": 1.3391, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.7710970464135021, |
|
"grad_norm": 0.5150391459465027, |
|
"learning_rate": 0.0003052888264542483, |
|
"loss": 1.3206, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.7721518987341772, |
|
"grad_norm": 0.4965534210205078, |
|
"learning_rate": 0.0003010808835908368, |
|
"loss": 1.317, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.7732067510548524, |
|
"grad_norm": 0.5015170574188232, |
|
"learning_rate": 0.00029693094082964785, |
|
"loss": 1.3197, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.7742616033755274, |
|
"grad_norm": 0.5498356223106384, |
|
"learning_rate": 0.0002928381987273508, |
|
"loss": 1.3186, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.7753164556962026, |
|
"grad_norm": 0.5097297430038452, |
|
"learning_rate": 0.0002888018688597272, |
|
"loss": 1.3147, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.7763713080168776, |
|
"grad_norm": 0.5277174711227417, |
|
"learning_rate": 0.0002848211736697894, |
|
"loss": 1.3217, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.7774261603375527, |
|
"grad_norm": 0.48281005024909973, |
|
"learning_rate": 0.00028089534631799183, |
|
"loss": 1.3249, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.7784810126582279, |
|
"grad_norm": 0.5234740972518921, |
|
"learning_rate": 0.0002770236305345076, |
|
"loss": 1.3109, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.7795358649789029, |
|
"grad_norm": 0.5207615494728088, |
|
"learning_rate": 0.00027320528047354093, |
|
"loss": 1.3201, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.7805907172995781, |
|
"grad_norm": 0.5451726317405701, |
|
"learning_rate": 0.00026943956056964773, |
|
"loss": 1.3251, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.7816455696202531, |
|
"grad_norm": 0.5427468419075012, |
|
"learning_rate": 0.0002657257453960364, |
|
"loss": 1.3066, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.7827004219409283, |
|
"grad_norm": 0.537834882736206, |
|
"learning_rate": 0.0002620631195248222, |
|
"loss": 1.3124, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.7837552742616034, |
|
"grad_norm": 0.5034371018409729, |
|
"learning_rate": 0.00025845097738920735, |
|
"loss": 1.2992, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.7848101265822784, |
|
"grad_norm": 0.5310587882995605, |
|
"learning_rate": 0.0002548886231475606, |
|
"loss": 1.3035, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.7858649789029536, |
|
"grad_norm": 0.49480363726615906, |
|
"learning_rate": 0.0002513753705493713, |
|
"loss": 1.3082, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.7869198312236287, |
|
"grad_norm": 0.5269278883934021, |
|
"learning_rate": 0.0002479105428030497, |
|
"loss": 1.3152, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.7879746835443038, |
|
"grad_norm": 0.5210197567939758, |
|
"learning_rate": 0.00024449347244555043, |
|
"loss": 1.3076, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.7890295358649789, |
|
"grad_norm": 0.5343834161758423, |
|
"learning_rate": 0.00024112350121379254, |
|
"loss": 1.3157, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.790084388185654, |
|
"grad_norm": 0.5516914129257202, |
|
"learning_rate": 0.000237799979917852, |
|
"loss": 1.319, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.7911392405063291, |
|
"grad_norm": 0.5483276844024658, |
|
"learning_rate": 0.00023452226831590227, |
|
"loss": 1.3158, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.7921940928270043, |
|
"grad_norm": 0.5133262276649475, |
|
"learning_rate": 0.00023128973499087779, |
|
"loss": 1.3219, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.7932489451476793, |
|
"grad_norm": 0.473877489566803, |
|
"learning_rate": 0.00022810175722883858, |
|
"loss": 1.3069, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.7943037974683544, |
|
"grad_norm": 0.5395793318748474, |
|
"learning_rate": 0.0002249577208990106, |
|
"loss": 1.307, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.7953586497890295, |
|
"grad_norm": 0.6155946850776672, |
|
"learning_rate": 0.00022185702033547996, |
|
"loss": 1.3048, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.7964135021097046, |
|
"grad_norm": 0.518414318561554, |
|
"learning_rate": 0.00021879905822051756, |
|
"loss": 1.3012, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.7974683544303798, |
|
"grad_norm": 0.6070139408111572, |
|
"learning_rate": 0.00021578324546951222, |
|
"loss": 1.3049, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.7985232067510548, |
|
"grad_norm": 0.543787956237793, |
|
"learning_rate": 0.00021280900111748948, |
|
"loss": 1.3144, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.79957805907173, |
|
"grad_norm": 0.5364104509353638, |
|
"learning_rate": 0.00020987575220719483, |
|
"loss": 1.3108, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.8006329113924051, |
|
"grad_norm": 0.4920696020126343, |
|
"learning_rate": 0.00020698293367871933, |
|
"loss": 1.3025, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.8016877637130801, |
|
"grad_norm": 0.5035790205001831, |
|
"learning_rate": 0.00020412998826064692, |
|
"loss": 1.3055, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.8027426160337553, |
|
"grad_norm": 0.5044794082641602, |
|
"learning_rate": 0.00020131636636270178, |
|
"loss": 1.3191, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.8037974683544303, |
|
"grad_norm": 0.5351099967956543, |
|
"learning_rate": 0.00019854152596987523, |
|
"loss": 1.2932, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.8048523206751055, |
|
"grad_norm": 0.5535302758216858, |
|
"learning_rate": 0.00019580493253801255, |
|
"loss": 1.3155, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.8059071729957806, |
|
"grad_norm": 0.5851619243621826, |
|
"learning_rate": 0.00019310605889083838, |
|
"loss": 1.3069, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.8069620253164557, |
|
"grad_norm": 0.5154551863670349, |
|
"learning_rate": 0.0001904443851184018, |
|
"loss": 1.3231, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.8080168776371308, |
|
"grad_norm": 0.5179932713508606, |
|
"learning_rate": 0.00018781939847692096, |
|
"loss": 1.2996, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.8090717299578059, |
|
"grad_norm": 0.5227841138839722, |
|
"learning_rate": 0.00018523059329000844, |
|
"loss": 1.3072, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.810126582278481, |
|
"grad_norm": 0.6099350452423096, |
|
"learning_rate": 0.0001826774708512579, |
|
"loss": 1.32, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.8111814345991561, |
|
"grad_norm": 0.6272170543670654, |
|
"learning_rate": 0.00018015953932817348, |
|
"loss": 1.2945, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.8122362869198312, |
|
"grad_norm": 0.5071361660957336, |
|
"learning_rate": 0.00017767631366742332, |
|
"loss": 1.303, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.8132911392405063, |
|
"grad_norm": 0.6405959129333496, |
|
"learning_rate": 0.00017522731550139922, |
|
"loss": 1.3002, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.8143459915611815, |
|
"grad_norm": 0.5483779311180115, |
|
"learning_rate": 0.00017281207305606407, |
|
"loss": 1.3052, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.8154008438818565, |
|
"grad_norm": 0.546625554561615, |
|
"learning_rate": 0.00017043012106006926, |
|
"loss": 1.3108, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.8164556962025317, |
|
"grad_norm": 0.520866870880127, |
|
"learning_rate": 0.00016808100065512528, |
|
"loss": 1.3116, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.8175105485232067, |
|
"grad_norm": 0.4994986057281494, |
|
"learning_rate": 0.00016576425930760734, |
|
"loss": 1.2978, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.8185654008438819, |
|
"grad_norm": 0.5830324292182922, |
|
"learning_rate": 0.00016347945072137934, |
|
"loss": 1.2968, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.819620253164557, |
|
"grad_norm": 0.5204963088035583, |
|
"learning_rate": 0.00016122613475181977, |
|
"loss": 1.3057, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.820675105485232, |
|
"grad_norm": 0.6280856728553772, |
|
"learning_rate": 0.00015900387732103232, |
|
"loss": 1.2982, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.8217299578059072, |
|
"grad_norm": 0.5684712529182434, |
|
"learning_rate": 0.00015681225033422526, |
|
"loss": 1.314, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.8227848101265823, |
|
"grad_norm": 0.4693601429462433, |
|
"learning_rate": 0.00015465083159724345, |
|
"loss": 1.3037, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.8238396624472574, |
|
"grad_norm": 0.5411034226417542, |
|
"learning_rate": 0.0001525192047352371, |
|
"loss": 1.3003, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.8248945147679325, |
|
"grad_norm": 0.5254245400428772, |
|
"learning_rate": 0.00015041695911245136, |
|
"loss": 1.3112, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.8259493670886076, |
|
"grad_norm": 0.513707160949707, |
|
"learning_rate": 0.00014834368975312172, |
|
"loss": 1.2982, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.8270042194092827, |
|
"grad_norm": 0.4985164701938629, |
|
"learning_rate": 0.00014629899726345958, |
|
"loss": 1.2859, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.8280590717299579, |
|
"grad_norm": 0.5627548098564148, |
|
"learning_rate": 0.00014428248775471316, |
|
"loss": 1.2939, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.8291139240506329, |
|
"grad_norm": 0.47483623027801514, |
|
"learning_rate": 0.000142293772767289, |
|
"loss": 1.2839, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.830168776371308, |
|
"grad_norm": 0.5388797521591187, |
|
"learning_rate": 0.00014033246919591922, |
|
"loss": 1.2949, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.8312236286919831, |
|
"grad_norm": 0.5436922907829285, |
|
"learning_rate": 0.00013839819921586025, |
|
"loss": 1.3166, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.8322784810126582, |
|
"grad_norm": 0.5270851850509644, |
|
"learning_rate": 0.00013649059021010894, |
|
"loss": 1.3102, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.8333333333333334, |
|
"grad_norm": 0.518153727054596, |
|
"learning_rate": 0.00013460927469762155, |
|
"loss": 1.2925, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.8343881856540084, |
|
"grad_norm": 0.5932394862174988, |
|
"learning_rate": 0.00013275389026252255, |
|
"loss": 1.2998, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.8354430379746836, |
|
"grad_norm": 0.5680364370346069, |
|
"learning_rate": 0.0001309240794842889, |
|
"loss": 1.2977, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.8364978902953587, |
|
"grad_norm": 0.503745436668396, |
|
"learning_rate": 0.00012911948986889664, |
|
"loss": 1.3086, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.8375527426160337, |
|
"grad_norm": 0.5149855017662048, |
|
"learning_rate": 0.00012733977378091664, |
|
"loss": 1.3032, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.8386075949367089, |
|
"grad_norm": 0.5323132276535034, |
|
"learning_rate": 0.00012558458837654633, |
|
"loss": 1.3041, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.8396624472573839, |
|
"grad_norm": 0.48893091082572937, |
|
"learning_rate": 0.00012385359553756422, |
|
"loss": 1.29, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.8407172995780591, |
|
"grad_norm": 0.5255675911903381, |
|
"learning_rate": 0.0001221464618061951, |
|
"loss": 1.2893, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.8417721518987342, |
|
"grad_norm": 0.4878615736961365, |
|
"learning_rate": 0.0001204628583208727, |
|
"loss": 1.2893, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.8428270042194093, |
|
"grad_norm": 0.6673786640167236, |
|
"learning_rate": 0.00011880246075288824, |
|
"loss": 1.3023, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.8438818565400844, |
|
"grad_norm": 0.5215135812759399, |
|
"learning_rate": 0.00011716494924391148, |
|
"loss": 1.2973, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.8449367088607594, |
|
"grad_norm": 0.55266934633255, |
|
"learning_rate": 0.00011555000834437363, |
|
"loss": 1.3037, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.8459915611814346, |
|
"grad_norm": 0.5418053865432739, |
|
"learning_rate": 0.00011395732695269907, |
|
"loss": 1.2964, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.8470464135021097, |
|
"grad_norm": 0.5645815134048462, |
|
"learning_rate": 0.00011238659825537507, |
|
"loss": 1.2947, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.8481012658227848, |
|
"grad_norm": 0.5466896295547485, |
|
"learning_rate": 0.00011083751966784716, |
|
"loss": 1.3066, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.8491561181434599, |
|
"grad_norm": 0.5144866704940796, |
|
"learning_rate": 0.00010930979277622952, |
|
"loss": 1.2879, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.8502109704641351, |
|
"grad_norm": 0.49308452010154724, |
|
"learning_rate": 0.00010780312327981853, |
|
"loss": 1.2961, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.8512658227848101, |
|
"grad_norm": 0.5420898199081421, |
|
"learning_rate": 0.0001063172209343989, |
|
"loss": 1.3015, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.8523206751054853, |
|
"grad_norm": 0.49578407406806946, |
|
"learning_rate": 0.000104851799496331, |
|
"loss": 1.3008, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.8533755274261603, |
|
"grad_norm": 0.5164802670478821, |
|
"learning_rate": 0.00010340657666740917, |
|
"loss": 1.2892, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.8544303797468354, |
|
"grad_norm": 0.5347152948379517, |
|
"learning_rate": 0.00010198127404047976, |
|
"loss": 1.284, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.8554852320675106, |
|
"grad_norm": 0.5997539162635803, |
|
"learning_rate": 0.00010057561704580898, |
|
"loss": 1.3024, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.8565400843881856, |
|
"grad_norm": 0.5137956738471985, |
|
"learning_rate": 9.918933489818986e-05, |
|
"loss": 1.2911, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.8575949367088608, |
|
"grad_norm": 0.5002637505531311, |
|
"learning_rate": 9.782216054477828e-05, |
|
"loss": 1.2947, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.8586497890295358, |
|
"grad_norm": 0.5070739984512329, |
|
"learning_rate": 9.647383061364803e-05, |
|
"loss": 1.291, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.859704641350211, |
|
"grad_norm": 0.49195969104766846, |
|
"learning_rate": 9.514408536305497e-05, |
|
"loss": 1.2882, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.8607594936708861, |
|
"grad_norm": 0.5105777978897095, |
|
"learning_rate": 9.383266863140043e-05, |
|
"loss": 1.2999, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.8618143459915611, |
|
"grad_norm": 0.514224112033844, |
|
"learning_rate": 9.25393277878844e-05, |
|
"loss": 1.2887, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.8628691983122363, |
|
"grad_norm": 0.5406697392463684, |
|
"learning_rate": 9.126381368383881e-05, |
|
"loss": 1.2959, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.8639240506329114, |
|
"grad_norm": 0.592902421951294, |
|
"learning_rate": 9.000588060473158e-05, |
|
"loss": 1.2878, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.8649789029535865, |
|
"grad_norm": 0.5043621063232422, |
|
"learning_rate": 8.876528622283232e-05, |
|
"loss": 1.3015, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.8660337552742616, |
|
"grad_norm": 0.496030330657959, |
|
"learning_rate": 8.754179155053052e-05, |
|
"loss": 1.2948, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.8670886075949367, |
|
"grad_norm": 0.5479779243469238, |
|
"learning_rate": 8.63351608942968e-05, |
|
"loss": 1.3008, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.8681434599156118, |
|
"grad_norm": 0.5193496942520142, |
|
"learning_rate": 8.514516180927926e-05, |
|
"loss": 1.2962, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.869198312236287, |
|
"grad_norm": 0.5200868248939514, |
|
"learning_rate": 8.397156505452524e-05, |
|
"loss": 1.2958, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.870253164556962, |
|
"grad_norm": 0.5099362134933472, |
|
"learning_rate": 8.28141445488205e-05, |
|
"loss": 1.3082, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.8713080168776371, |
|
"grad_norm": 0.48709386587142944, |
|
"learning_rate": 8.167267732713705e-05, |
|
"loss": 1.2984, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.8723628691983122, |
|
"grad_norm": 0.5332502126693726, |
|
"learning_rate": 8.054694349768114e-05, |
|
"loss": 1.2949, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.8734177215189873, |
|
"grad_norm": 0.5007814168930054, |
|
"learning_rate": 7.943672619953359e-05, |
|
"loss": 1.2785, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.8744725738396625, |
|
"grad_norm": 0.5404915809631348, |
|
"learning_rate": 7.834181156087357e-05, |
|
"loss": 1.2791, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.8755274261603375, |
|
"grad_norm": 0.5205032825469971, |
|
"learning_rate": 7.726198865777852e-05, |
|
"loss": 1.3043, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.8765822784810127, |
|
"grad_norm": 0.507988452911377, |
|
"learning_rate": 7.61970494735919e-05, |
|
"loss": 1.2843, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.8776371308016878, |
|
"grad_norm": 0.4945039749145508, |
|
"learning_rate": 7.514678885885086e-05, |
|
"loss": 1.3016, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.8786919831223629, |
|
"grad_norm": 0.49984875321388245, |
|
"learning_rate": 7.411100449176634e-05, |
|
"loss": 1.2944, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.879746835443038, |
|
"grad_norm": 0.5130136013031006, |
|
"learning_rate": 7.308949683924792e-05, |
|
"loss": 1.2817, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.880801687763713, |
|
"grad_norm": 0.5221125483512878, |
|
"learning_rate": 7.208206911846581e-05, |
|
"loss": 1.2852, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.8818565400843882, |
|
"grad_norm": 0.5233004689216614, |
|
"learning_rate": 7.10885272589427e-05, |
|
"loss": 1.286, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.8829113924050633, |
|
"grad_norm": 0.48948031663894653, |
|
"learning_rate": 7.010867986516811e-05, |
|
"loss": 1.2913, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.8839662447257384, |
|
"grad_norm": 0.5290145874023438, |
|
"learning_rate": 6.914233817972799e-05, |
|
"loss": 1.2966, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.8850210970464135, |
|
"grad_norm": 0.5159009099006653, |
|
"learning_rate": 6.818931604694264e-05, |
|
"loss": 1.2893, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.8860759493670886, |
|
"grad_norm": 0.4972204267978668, |
|
"learning_rate": 6.724942987700563e-05, |
|
"loss": 1.2879, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.8871308016877637, |
|
"grad_norm": 0.5020485520362854, |
|
"learning_rate": 6.632249861061733e-05, |
|
"loss": 1.3075, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.8881856540084389, |
|
"grad_norm": 0.4924856126308441, |
|
"learning_rate": 6.540834368410549e-05, |
|
"loss": 1.2947, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 0.8892405063291139, |
|
"grad_norm": 0.5332484245300293, |
|
"learning_rate": 6.4506788995027e-05, |
|
"loss": 1.292, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.890295358649789, |
|
"grad_norm": 0.5037619471549988, |
|
"learning_rate": 6.361766086824344e-05, |
|
"loss": 1.2857, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 0.8913502109704642, |
|
"grad_norm": 0.5982730388641357, |
|
"learning_rate": 6.274078802246449e-05, |
|
"loss": 1.294, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 0.8924050632911392, |
|
"grad_norm": 0.5343823432922363, |
|
"learning_rate": 6.187600153725223e-05, |
|
"loss": 1.2898, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.8934599156118144, |
|
"grad_norm": 0.49162301421165466, |
|
"learning_rate": 6.1023134820480546e-05, |
|
"loss": 1.2915, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 0.8945147679324894, |
|
"grad_norm": 0.5004287362098694, |
|
"learning_rate": 6.0182023576242725e-05, |
|
"loss": 1.2826, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 0.8955696202531646, |
|
"grad_norm": 0.5593549013137817, |
|
"learning_rate": 5.9352505773201664e-05, |
|
"loss": 1.29, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.8966244725738397, |
|
"grad_norm": 0.5120792388916016, |
|
"learning_rate": 5.8534421613376175e-05, |
|
"loss": 1.2808, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.8976793248945147, |
|
"grad_norm": 0.4898166060447693, |
|
"learning_rate": 5.772761350135759e-05, |
|
"loss": 1.2924, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 0.8987341772151899, |
|
"grad_norm": 0.5263928174972534, |
|
"learning_rate": 5.6931926013950586e-05, |
|
"loss": 1.2866, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.8997890295358649, |
|
"grad_norm": 0.4969990849494934, |
|
"learning_rate": 5.61472058702326e-05, |
|
"loss": 1.3002, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 0.9008438818565401, |
|
"grad_norm": 0.49934616684913635, |
|
"learning_rate": 5.53733019020258e-05, |
|
"loss": 1.2964, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 0.9018987341772152, |
|
"grad_norm": 0.5362349152565002, |
|
"learning_rate": 5.4610065024776125e-05, |
|
"loss": 1.2876, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.9029535864978903, |
|
"grad_norm": 0.5168766379356384, |
|
"learning_rate": 5.38573482088337e-05, |
|
"loss": 1.2782, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 0.9040084388185654, |
|
"grad_norm": 0.5859018564224243, |
|
"learning_rate": 5.3115006451129075e-05, |
|
"loss": 1.2994, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 0.9050632911392406, |
|
"grad_norm": 0.5291493535041809, |
|
"learning_rate": 5.2382896747239935e-05, |
|
"loss": 1.2917, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.9061181434599156, |
|
"grad_norm": 0.61119145154953, |
|
"learning_rate": 5.166087806384275e-05, |
|
"loss": 1.2935, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 0.9071729957805907, |
|
"grad_norm": 0.5436091423034668, |
|
"learning_rate": 5.0948811311544186e-05, |
|
"loss": 1.2868, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 0.9082278481012658, |
|
"grad_norm": 0.5782431364059448, |
|
"learning_rate": 5.024655931808697e-05, |
|
"loss": 1.2882, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.9092827004219409, |
|
"grad_norm": 0.5377077460289001, |
|
"learning_rate": 4.955398680192509e-05, |
|
"loss": 1.2829, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 0.9103375527426161, |
|
"grad_norm": 0.5343286991119385, |
|
"learning_rate": 4.887096034616319e-05, |
|
"loss": 1.2951, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 0.9113924050632911, |
|
"grad_norm": 0.5730398297309875, |
|
"learning_rate": 4.819734837285529e-05, |
|
"loss": 1.2752, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.9124472573839663, |
|
"grad_norm": 0.5420026183128357, |
|
"learning_rate": 4.7533021117657475e-05, |
|
"loss": 1.2816, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 0.9135021097046413, |
|
"grad_norm": 0.5295568108558655, |
|
"learning_rate": 4.687785060483031e-05, |
|
"loss": 1.2809, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 0.9145569620253164, |
|
"grad_norm": 0.4965777099132538, |
|
"learning_rate": 4.623171062258557e-05, |
|
"loss": 1.2902, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.9156118143459916, |
|
"grad_norm": 0.4828559160232544, |
|
"learning_rate": 4.559447669877288e-05, |
|
"loss": 1.2984, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 0.9166666666666666, |
|
"grad_norm": 0.4930232763290405, |
|
"learning_rate": 4.496602607690141e-05, |
|
"loss": 1.2883, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 0.9177215189873418, |
|
"grad_norm": 0.48897314071655273, |
|
"learning_rate": 4.434623769249217e-05, |
|
"loss": 1.2877, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.9187763713080169, |
|
"grad_norm": 0.511944055557251, |
|
"learning_rate": 4.373499214975615e-05, |
|
"loss": 1.2873, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 0.919831223628692, |
|
"grad_norm": 0.4932258129119873, |
|
"learning_rate": 4.313217169859397e-05, |
|
"loss": 1.281, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 0.9208860759493671, |
|
"grad_norm": 0.5023041367530823, |
|
"learning_rate": 4.253766021191256e-05, |
|
"loss": 1.2946, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.9219409282700421, |
|
"grad_norm": 0.49912768602371216, |
|
"learning_rate": 4.19513431632545e-05, |
|
"loss": 1.2842, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 0.9229957805907173, |
|
"grad_norm": 0.5335937738418579, |
|
"learning_rate": 4.1373107604735626e-05, |
|
"loss": 1.2798, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 0.9240506329113924, |
|
"grad_norm": 0.5522021651268005, |
|
"learning_rate": 4.0802842145286876e-05, |
|
"loss": 1.2692, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.9251054852320675, |
|
"grad_norm": 0.5096961855888367, |
|
"learning_rate": 4.024043692919589e-05, |
|
"loss": 1.2813, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 0.9261603375527426, |
|
"grad_norm": 0.497565358877182, |
|
"learning_rate": 3.968578361494449e-05, |
|
"loss": 1.2884, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 0.9272151898734177, |
|
"grad_norm": 0.4803836941719055, |
|
"learning_rate": 3.91387753543378e-05, |
|
"loss": 1.2827, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.9282700421940928, |
|
"grad_norm": 0.5137074589729309, |
|
"learning_rate": 3.859930677192103e-05, |
|
"loss": 1.2796, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 0.929324894514768, |
|
"grad_norm": 0.48237344622612, |
|
"learning_rate": 3.806727394468005e-05, |
|
"loss": 1.2984, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 0.930379746835443, |
|
"grad_norm": 0.47588059306144714, |
|
"learning_rate": 3.7542574382021635e-05, |
|
"loss": 1.283, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.9314345991561181, |
|
"grad_norm": 0.5075386166572571, |
|
"learning_rate": 3.702510700602975e-05, |
|
"loss": 1.2997, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 0.9324894514767933, |
|
"grad_norm": 0.48565760254859924, |
|
"learning_rate": 3.651477213199394e-05, |
|
"loss": 1.2846, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 0.9335443037974683, |
|
"grad_norm": 0.5382689237594604, |
|
"learning_rate": 3.601147144920609e-05, |
|
"loss": 1.2857, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.9345991561181435, |
|
"grad_norm": 0.5447915196418762, |
|
"learning_rate": 3.5515108002021946e-05, |
|
"loss": 1.2859, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 0.9356540084388185, |
|
"grad_norm": 0.5087395310401917, |
|
"learning_rate": 3.502558617118352e-05, |
|
"loss": 1.2895, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 0.9367088607594937, |
|
"grad_norm": 0.5213615894317627, |
|
"learning_rate": 3.454281165539913e-05, |
|
"loss": 1.29, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.9377637130801688, |
|
"grad_norm": 0.482086718082428, |
|
"learning_rate": 3.406669145317717e-05, |
|
"loss": 1.2906, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 0.9388185654008439, |
|
"grad_norm": 0.4832416772842407, |
|
"learning_rate": 3.359713384491036e-05, |
|
"loss": 1.2817, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 0.939873417721519, |
|
"grad_norm": 0.5172869563102722, |
|
"learning_rate": 3.313404837520694e-05, |
|
"loss": 1.2812, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.9409282700421941, |
|
"grad_norm": 0.5098752975463867, |
|
"learning_rate": 3.267734583546536e-05, |
|
"loss": 1.2881, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 0.9419831223628692, |
|
"grad_norm": 0.5164980292320251, |
|
"learning_rate": 3.222693824668916e-05, |
|
"loss": 1.2829, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 0.9430379746835443, |
|
"grad_norm": 0.4869752526283264, |
|
"learning_rate": 3.178273884253874e-05, |
|
"loss": 1.2857, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.9440928270042194, |
|
"grad_norm": 0.5009376406669617, |
|
"learning_rate": 3.134466205261674e-05, |
|
"loss": 1.2866, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 0.9451476793248945, |
|
"grad_norm": 0.5258128046989441, |
|
"learning_rate": 3.0912623485983774e-05, |
|
"loss": 1.2884, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 0.9462025316455697, |
|
"grad_norm": 0.5623719096183777, |
|
"learning_rate": 3.048653991490141e-05, |
|
"loss": 1.2861, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.9472573839662447, |
|
"grad_norm": 0.5587673187255859, |
|
"learning_rate": 3.0066329258799184e-05, |
|
"loss": 1.2824, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 0.9483122362869199, |
|
"grad_norm": 0.5034454464912415, |
|
"learning_rate": 2.965191056846266e-05, |
|
"loss": 1.2903, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 0.9493670886075949, |
|
"grad_norm": 0.4845164120197296, |
|
"learning_rate": 2.9243204010439396e-05, |
|
"loss": 1.2824, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.95042194092827, |
|
"grad_norm": 0.5366633534431458, |
|
"learning_rate": 2.8840130851659852e-05, |
|
"loss": 1.2805, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 0.9514767932489452, |
|
"grad_norm": 0.5056827068328857, |
|
"learning_rate": 2.844261344427029e-05, |
|
"loss": 1.282, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 0.9525316455696202, |
|
"grad_norm": 0.5826355814933777, |
|
"learning_rate": 2.805057521067472e-05, |
|
"loss": 1.3008, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.9535864978902954, |
|
"grad_norm": 0.5047662258148193, |
|
"learning_rate": 2.766394062878302e-05, |
|
"loss": 1.277, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 0.9546413502109705, |
|
"grad_norm": 0.4909191131591797, |
|
"learning_rate": 2.7282635217462405e-05, |
|
"loss": 1.288, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 0.9556962025316456, |
|
"grad_norm": 0.4818117618560791, |
|
"learning_rate": 2.6906585522189378e-05, |
|
"loss": 1.2892, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.9567510548523207, |
|
"grad_norm": 0.4789429008960724, |
|
"learning_rate": 2.653571910089951e-05, |
|
"loss": 1.2889, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 0.9578059071729957, |
|
"grad_norm": 0.4879331588745117, |
|
"learning_rate": 2.6169964510032243e-05, |
|
"loss": 1.289, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 0.9588607594936709, |
|
"grad_norm": 0.5181798338890076, |
|
"learning_rate": 2.580925129076798e-05, |
|
"loss": 1.2906, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.959915611814346, |
|
"grad_norm": 0.4963332712650299, |
|
"learning_rate": 2.5453509955454954e-05, |
|
"loss": 1.2691, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 0.9609704641350211, |
|
"grad_norm": 0.4964265525341034, |
|
"learning_rate": 2.510267197422317e-05, |
|
"loss": 1.2805, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 0.9620253164556962, |
|
"grad_norm": 0.5356233716011047, |
|
"learning_rate": 2.4756669761782806e-05, |
|
"loss": 1.292, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.9630801687763713, |
|
"grad_norm": 0.5141054391860962, |
|
"learning_rate": 2.4415436664404643e-05, |
|
"loss": 1.2775, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 0.9641350210970464, |
|
"grad_norm": 0.5709123611450195, |
|
"learning_rate": 2.4078906947079882e-05, |
|
"loss": 1.2943, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 0.9651898734177216, |
|
"grad_norm": 0.535489559173584, |
|
"learning_rate": 2.3747015780857007e-05, |
|
"loss": 1.2818, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.9662447257383966, |
|
"grad_norm": 0.4924616515636444, |
|
"learning_rate": 2.3419699230353144e-05, |
|
"loss": 1.2898, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 0.9672995780590717, |
|
"grad_norm": 0.5021106600761414, |
|
"learning_rate": 2.3096894241437583e-05, |
|
"loss": 1.2873, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 0.9683544303797469, |
|
"grad_norm": 0.48365768790245056, |
|
"learning_rate": 2.2778538629085057e-05, |
|
"loss": 1.2852, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.9694092827004219, |
|
"grad_norm": 0.5007200241088867, |
|
"learning_rate": 2.2464571065396428e-05, |
|
"loss": 1.2809, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 0.9704641350210971, |
|
"grad_norm": 0.49459993839263916, |
|
"learning_rate": 2.2154931067784525e-05, |
|
"loss": 1.2835, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 0.9715189873417721, |
|
"grad_norm": 0.49230140447616577, |
|
"learning_rate": 2.1849558987322783e-05, |
|
"loss": 1.2836, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.9725738396624473, |
|
"grad_norm": 0.5009668469429016, |
|
"learning_rate": 2.1548395997254516e-05, |
|
"loss": 1.283, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 0.9736286919831224, |
|
"grad_norm": 0.4961964786052704, |
|
"learning_rate": 2.1251384081660546e-05, |
|
"loss": 1.2728, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 0.9746835443037974, |
|
"grad_norm": 0.5043670535087585, |
|
"learning_rate": 2.0958466024283035e-05, |
|
"loss": 1.2674, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.9757383966244726, |
|
"grad_norm": 0.49852392077445984, |
|
"learning_rate": 2.0669585397503362e-05, |
|
"loss": 1.2813, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 0.9767932489451476, |
|
"grad_norm": 0.5222915410995483, |
|
"learning_rate": 2.0384686551471954e-05, |
|
"loss": 1.2765, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 0.9778481012658228, |
|
"grad_norm": 0.4927261173725128, |
|
"learning_rate": 2.0103714603387898e-05, |
|
"loss": 1.2882, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.9789029535864979, |
|
"grad_norm": 0.4864712059497833, |
|
"learning_rate": 1.9826615426926342e-05, |
|
"loss": 1.2735, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 0.979957805907173, |
|
"grad_norm": 0.489318311214447, |
|
"learning_rate": 1.9553335641811623e-05, |
|
"loss": 1.2807, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 0.9810126582278481, |
|
"grad_norm": 0.49541473388671875, |
|
"learning_rate": 1.9283822603534143e-05, |
|
"loss": 1.2812, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.9820675105485233, |
|
"grad_norm": 0.5180107355117798, |
|
"learning_rate": 1.90180243932089e-05, |
|
"loss": 1.2831, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 0.9831223628691983, |
|
"grad_norm": 0.5209818482398987, |
|
"learning_rate": 1.8755889807573868e-05, |
|
"loss": 1.2766, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 0.9841772151898734, |
|
"grad_norm": 0.5105024576187134, |
|
"learning_rate": 1.8497368349126255e-05, |
|
"loss": 1.2884, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.9852320675105485, |
|
"grad_norm": 0.4965643882751465, |
|
"learning_rate": 1.824241021639465e-05, |
|
"loss": 1.2881, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 0.9862869198312236, |
|
"grad_norm": 0.4937804639339447, |
|
"learning_rate": 1.799096629434529e-05, |
|
"loss": 1.2987, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 0.9873417721518988, |
|
"grad_norm": 0.49657437205314636, |
|
"learning_rate": 1.7742988144920578e-05, |
|
"loss": 1.2896, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.9883966244725738, |
|
"grad_norm": 0.5091279745101929, |
|
"learning_rate": 1.7498427997707978e-05, |
|
"loss": 1.2874, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 0.989451476793249, |
|
"grad_norm": 0.5247161388397217, |
|
"learning_rate": 1.7257238740737548e-05, |
|
"loss": 1.2736, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 0.990506329113924, |
|
"grad_norm": 0.5218049883842468, |
|
"learning_rate": 1.7019373911406307e-05, |
|
"loss": 1.2784, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.9915611814345991, |
|
"grad_norm": 0.4994415044784546, |
|
"learning_rate": 1.67847876875277e-05, |
|
"loss": 1.2921, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 0.9926160337552743, |
|
"grad_norm": 0.5280937552452087, |
|
"learning_rate": 1.655343487850443e-05, |
|
"loss": 1.2688, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 0.9936708860759493, |
|
"grad_norm": 0.5012044310569763, |
|
"learning_rate": 1.6325270916622947e-05, |
|
"loss": 1.2853, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.9947257383966245, |
|
"grad_norm": 0.48524028062820435, |
|
"learning_rate": 1.610025184846797e-05, |
|
"loss": 1.286, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 0.9957805907172996, |
|
"grad_norm": 0.4760825037956238, |
|
"learning_rate": 1.587833432645528e-05, |
|
"loss": 1.2819, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 0.9968354430379747, |
|
"grad_norm": 0.5371087193489075, |
|
"learning_rate": 1.5659475600481297e-05, |
|
"loss": 1.2924, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.9978902953586498, |
|
"grad_norm": 0.5069384574890137, |
|
"learning_rate": 1.544363350968769e-05, |
|
"loss": 1.2753, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 0.9989451476793249, |
|
"grad_norm": 0.5072888135910034, |
|
"learning_rate": 1.523076647433954e-05, |
|
"loss": 1.2894, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": Infinity, |
|
"learning_rate": 1.5041695911245136e-05, |
|
"loss": 1.2784, |
|
"step": 9480 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 9480, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.832308198648013e+16, |
|
"train_batch_size": 1024, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|