{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.9996055735491556,
  "eval_steps": 500,
  "global_step": 32956,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030340496218815658,
      "grad_norm": 1.0293707847595215,
      "learning_rate": 9.924743581962737e-05,
      "loss": 1.2653,
      "step": 250
    },
    {
      "epoch": 0.060680992437631316,
      "grad_norm": 0.8869585990905762,
      "learning_rate": 9.848880257328398e-05,
      "loss": 0.757,
      "step": 500
    },
    {
      "epoch": 0.09102148865644698,
      "grad_norm": 0.9423841834068298,
      "learning_rate": 9.773016932694059e-05,
      "loss": 0.6578,
      "step": 750
    },
    {
      "epoch": 0.12136198487526263,
      "grad_norm": 0.7434495091438293,
      "learning_rate": 9.697153608059721e-05,
      "loss": 0.6204,
      "step": 1000
    },
    {
      "epoch": 0.1517024810940783,
      "grad_norm": 0.6856955885887146,
      "learning_rate": 9.621290283425382e-05,
      "loss": 0.6006,
      "step": 1250
    },
    {
      "epoch": 0.18204297731289396,
      "grad_norm": 0.6636335849761963,
      "learning_rate": 9.545426958791042e-05,
      "loss": 0.5822,
      "step": 1500
    },
    {
      "epoch": 0.2123834735317096,
      "grad_norm": 0.6647191643714905,
      "learning_rate": 9.469563634156705e-05,
      "loss": 0.5681,
      "step": 1750
    },
    {
      "epoch": 0.24272396975052526,
      "grad_norm": 0.825007438659668,
      "learning_rate": 9.393700309522365e-05,
      "loss": 0.5531,
      "step": 2000
    },
    {
      "epoch": 0.2730644659693409,
      "grad_norm": 0.651090681552887,
      "learning_rate": 9.317836984888026e-05,
      "loss": 0.5387,
      "step": 2250
    },
    {
      "epoch": 0.3034049621881566,
      "grad_norm": 0.7366721034049988,
      "learning_rate": 9.241973660253687e-05,
      "loss": 0.517,
      "step": 2500
    },
    {
      "epoch": 0.33374545840697223,
      "grad_norm": 0.8244208693504333,
      "learning_rate": 9.166110335619349e-05,
      "loss": 0.4931,
      "step": 2750
    },
    {
      "epoch": 0.3640859546257879,
      "grad_norm": 0.7282492518424988,
      "learning_rate": 9.09024701098501e-05,
      "loss": 0.4741,
      "step": 3000
    },
    {
      "epoch": 0.39442645084460354,
      "grad_norm": 0.877441942691803,
      "learning_rate": 9.014383686350671e-05,
      "loss": 0.4543,
      "step": 3250
    },
    {
      "epoch": 0.4247669470634192,
      "grad_norm": 0.8125002980232239,
      "learning_rate": 8.938520361716333e-05,
      "loss": 0.4301,
      "step": 3500
    },
    {
      "epoch": 0.4551074432822349,
      "grad_norm": 0.7860125303268433,
      "learning_rate": 8.862657037081994e-05,
      "loss": 0.4165,
      "step": 3750
    },
    {
      "epoch": 0.48544793950105053,
      "grad_norm": 0.8634310364723206,
      "learning_rate": 8.786793712447655e-05,
      "loss": 0.3964,
      "step": 4000
    },
    {
      "epoch": 0.5157884357198662,
      "grad_norm": 0.8578837513923645,
      "learning_rate": 8.710930387813317e-05,
      "loss": 0.3847,
      "step": 4250
    },
    {
      "epoch": 0.5461289319386818,
      "grad_norm": 0.6492015719413757,
      "learning_rate": 8.635067063178978e-05,
      "loss": 0.3763,
      "step": 4500
    },
    {
      "epoch": 0.5764694281574975,
      "grad_norm": 0.7463727593421936,
      "learning_rate": 8.559203738544639e-05,
      "loss": 0.36,
      "step": 4750
    },
    {
      "epoch": 0.6068099243763132,
      "grad_norm": 0.7797712683677673,
      "learning_rate": 8.483340413910301e-05,
      "loss": 0.3597,
      "step": 5000
    },
    {
      "epoch": 0.6371504205951288,
      "grad_norm": 0.8424202799797058,
      "learning_rate": 8.407477089275962e-05,
      "loss": 0.344,
      "step": 5250
    },
    {
      "epoch": 0.6674909168139445,
      "grad_norm": 0.7570486664772034,
      "learning_rate": 8.331613764641622e-05,
      "loss": 0.3402,
      "step": 5500
    },
    {
      "epoch": 0.6978314130327602,
      "grad_norm": 0.741788923740387,
      "learning_rate": 8.255750440007283e-05,
      "loss": 0.3378,
      "step": 5750
    },
    {
      "epoch": 0.7281719092515758,
      "grad_norm": 0.7841416597366333,
      "learning_rate": 8.179887115372944e-05,
      "loss": 0.3305,
      "step": 6000
    },
    {
      "epoch": 0.7585124054703914,
      "grad_norm": 0.7679227590560913,
      "learning_rate": 8.104023790738605e-05,
      "loss": 0.3263,
      "step": 6250
    },
    {
      "epoch": 0.7888529016892071,
      "grad_norm": 0.7030394673347473,
      "learning_rate": 8.028160466104267e-05,
      "loss": 0.3232,
      "step": 6500
    },
    {
      "epoch": 0.8191933979080228,
      "grad_norm": 0.7032948136329651,
      "learning_rate": 7.952297141469928e-05,
      "loss": 0.3142,
      "step": 6750
    },
    {
      "epoch": 0.8495338941268384,
      "grad_norm": 0.7051456570625305,
      "learning_rate": 7.876433816835589e-05,
      "loss": 0.3101,
      "step": 7000
    },
    {
      "epoch": 0.8798743903456541,
      "grad_norm": 0.680454432964325,
      "learning_rate": 7.80057049220125e-05,
      "loss": 0.3128,
      "step": 7250
    },
    {
      "epoch": 0.9102148865644698,
      "grad_norm": 0.6378083229064941,
      "learning_rate": 7.724707167566912e-05,
      "loss": 0.3058,
      "step": 7500
    },
    {
      "epoch": 0.9405553827832854,
      "grad_norm": 0.6744751930236816,
      "learning_rate": 7.648843842932573e-05,
      "loss": 0.3021,
      "step": 7750
    },
    {
      "epoch": 0.9708958790021011,
      "grad_norm": 0.6540088057518005,
      "learning_rate": 7.572980518298233e-05,
      "loss": 0.3028,
      "step": 8000
    },
    {
      "epoch": 1.0012363752209168,
      "grad_norm": 0.6479789614677429,
      "learning_rate": 7.497117193663896e-05,
      "loss": 0.2993,
      "step": 8250
    },
    {
      "epoch": 1.0315768714397324,
      "grad_norm": 0.696811854839325,
      "learning_rate": 7.421253869029556e-05,
      "loss": 0.29,
      "step": 8500
    },
    {
      "epoch": 1.061917367658548,
      "grad_norm": 0.6420058608055115,
      "learning_rate": 7.345390544395217e-05,
      "loss": 0.2862,
      "step": 8750
    },
    {
      "epoch": 1.0922578638773637,
      "grad_norm": 0.7226221561431885,
      "learning_rate": 7.26952721976088e-05,
      "loss": 0.2876,
      "step": 9000
    },
    {
      "epoch": 1.1225983600961793,
      "grad_norm": 0.5515549182891846,
      "learning_rate": 7.19366389512654e-05,
      "loss": 0.284,
      "step": 9250
    },
    {
      "epoch": 1.1529388563149952,
      "grad_norm": 0.5973398685455322,
      "learning_rate": 7.117800570492201e-05,
      "loss": 0.2828,
      "step": 9500
    },
    {
      "epoch": 1.1832793525338108,
      "grad_norm": 0.6326724886894226,
      "learning_rate": 7.041937245857862e-05,
      "loss": 0.281,
      "step": 9750
    },
    {
      "epoch": 1.2136198487526264,
      "grad_norm": 0.5345270037651062,
      "learning_rate": 6.966073921223524e-05,
      "loss": 0.2817,
      "step": 10000
    },
    {
      "epoch": 1.243960344971442,
      "grad_norm": 0.6941035389900208,
      "learning_rate": 6.890210596589185e-05,
      "loss": 0.2783,
      "step": 10250
    },
    {
      "epoch": 1.2743008411902577,
      "grad_norm": 0.499006062746048,
      "learning_rate": 6.814347271954846e-05,
      "loss": 0.2765,
      "step": 10500
    },
    {
      "epoch": 1.3046413374090733,
      "grad_norm": 0.5554171204566956,
      "learning_rate": 6.738483947320508e-05,
      "loss": 0.2769,
      "step": 10750
    },
    {
      "epoch": 1.334981833627889,
      "grad_norm": 0.6185881495475769,
      "learning_rate": 6.662620622686169e-05,
      "loss": 0.2737,
      "step": 11000
    },
    {
      "epoch": 1.3653223298467045,
      "grad_norm": 0.5652614235877991,
      "learning_rate": 6.58675729805183e-05,
      "loss": 0.2736,
      "step": 11250
    },
    {
      "epoch": 1.3956628260655202,
      "grad_norm": 0.602508008480072,
      "learning_rate": 6.510893973417492e-05,
      "loss": 0.2726,
      "step": 11500
    },
    {
      "epoch": 1.426003322284336,
      "grad_norm": 0.6180042028427124,
      "learning_rate": 6.435030648783153e-05,
      "loss": 0.2713,
      "step": 11750
    },
    {
      "epoch": 1.4563438185031516,
      "grad_norm": 0.510152280330658,
      "learning_rate": 6.359167324148813e-05,
      "loss": 0.2697,
      "step": 12000
    },
    {
      "epoch": 1.4866843147219673,
      "grad_norm": 0.5209817886352539,
      "learning_rate": 6.283303999514476e-05,
      "loss": 0.2724,
      "step": 12250
    },
    {
      "epoch": 1.517024810940783,
      "grad_norm": 0.5717406868934631,
      "learning_rate": 6.207440674880136e-05,
      "loss": 0.2675,
      "step": 12500
    },
    {
      "epoch": 1.5473653071595987,
      "grad_norm": 0.5593615770339966,
      "learning_rate": 6.131577350245797e-05,
      "loss": 0.2683,
      "step": 12750
    },
    {
      "epoch": 1.5777058033784144,
      "grad_norm": 0.6112098693847656,
      "learning_rate": 6.055714025611459e-05,
      "loss": 0.2637,
      "step": 13000
    },
    {
      "epoch": 1.60804629959723,
      "grad_norm": 0.6081228256225586,
      "learning_rate": 5.97985070097712e-05,
      "loss": 0.2662,
      "step": 13250
    },
    {
      "epoch": 1.6383867958160456,
      "grad_norm": 0.6320655345916748,
      "learning_rate": 5.903987376342781e-05,
      "loss": 0.2622,
      "step": 13500
    },
    {
      "epoch": 1.6687272920348613,
      "grad_norm": 0.5471298098564148,
      "learning_rate": 5.8281240517084425e-05,
      "loss": 0.2596,
      "step": 13750
    },
    {
      "epoch": 1.6990677882536769,
      "grad_norm": 0.5194515585899353,
      "learning_rate": 5.7522607270741034e-05,
      "loss": 0.2627,
      "step": 14000
    },
    {
      "epoch": 1.7294082844724925,
      "grad_norm": 0.64277184009552,
      "learning_rate": 5.676397402439765e-05,
      "loss": 0.26,
      "step": 14250
    },
    {
      "epoch": 1.7597487806913081,
      "grad_norm": 0.5324087738990784,
      "learning_rate": 5.6005340778054264e-05,
      "loss": 0.2608,
      "step": 14500
    },
    {
      "epoch": 1.7900892769101238,
      "grad_norm": 0.5574278235435486,
      "learning_rate": 5.524670753171087e-05,
      "loss": 0.2617,
      "step": 14750
    },
    {
      "epoch": 1.8204297731289394,
      "grad_norm": 0.5711286664009094,
      "learning_rate": 5.448807428536749e-05,
      "loss": 0.2571,
      "step": 15000
    },
    {
      "epoch": 1.8507702693477552,
      "grad_norm": 0.5730472207069397,
      "learning_rate": 5.3729441039024095e-05,
      "loss": 0.2563,
      "step": 15250
    },
    {
      "epoch": 1.8811107655665709,
      "grad_norm": 0.6286032199859619,
      "learning_rate": 5.297080779268071e-05,
      "loss": 0.2546,
      "step": 15500
    },
    {
      "epoch": 1.9114512617853865,
      "grad_norm": 0.5809808373451233,
      "learning_rate": 5.2212174546337325e-05,
      "loss": 0.2548,
      "step": 15750
    },
    {
      "epoch": 1.9417917580042021,
      "grad_norm": 0.5805879831314087,
      "learning_rate": 5.1453541299993933e-05,
      "loss": 0.2553,
      "step": 16000
    },
    {
      "epoch": 1.972132254223018,
      "grad_norm": 0.6372638940811157,
      "learning_rate": 5.069490805365055e-05,
      "loss": 0.2524,
      "step": 16250
    },
    {
      "epoch": 2.0024727504418336,
      "grad_norm": 0.643139660358429,
      "learning_rate": 4.993627480730716e-05,
      "loss": 0.251,
      "step": 16500
    },
    {
      "epoch": 2.032813246660649,
      "grad_norm": 0.5501179099082947,
      "learning_rate": 4.917764156096377e-05,
      "loss": 0.244,
      "step": 16750
    },
    {
      "epoch": 2.063153742879465,
      "grad_norm": 0.6013950109481812,
      "learning_rate": 4.841900831462039e-05,
      "loss": 0.2448,
      "step": 17000
    },
    {
      "epoch": 2.0934942390982805,
      "grad_norm": 0.4996771216392517,
      "learning_rate": 4.7660375068276995e-05,
      "loss": 0.2442,
      "step": 17250
    },
    {
      "epoch": 2.123834735317096,
      "grad_norm": 0.6059885025024414,
      "learning_rate": 4.690174182193361e-05,
      "loss": 0.242,
      "step": 17500
    },
    {
      "epoch": 2.1541752315359117,
      "grad_norm": 0.48200371861457825,
      "learning_rate": 4.614310857559022e-05,
      "loss": 0.2418,
      "step": 17750
    },
    {
      "epoch": 2.1845157277547274,
      "grad_norm": 0.6055967211723328,
      "learning_rate": 4.5384475329246827e-05,
      "loss": 0.2428,
      "step": 18000
    },
    {
      "epoch": 2.214856223973543,
      "grad_norm": 0.5236734747886658,
      "learning_rate": 4.462584208290344e-05,
      "loss": 0.2409,
      "step": 18250
    },
    {
      "epoch": 2.2451967201923586,
      "grad_norm": 0.6422255635261536,
      "learning_rate": 4.386720883656005e-05,
      "loss": 0.2415,
      "step": 18500
    },
    {
      "epoch": 2.2755372164111742,
      "grad_norm": 0.545559823513031,
      "learning_rate": 4.3108575590216665e-05,
      "loss": 0.242,
      "step": 18750
    },
    {
      "epoch": 2.3058777126299903,
      "grad_norm": 0.547564685344696,
      "learning_rate": 4.234994234387328e-05,
      "loss": 0.2406,
      "step": 19000
    },
    {
      "epoch": 2.3362182088488055,
      "grad_norm": 0.5706421732902527,
      "learning_rate": 4.159130909752989e-05,
      "loss": 0.2431,
      "step": 19250
    },
    {
      "epoch": 2.3665587050676216,
      "grad_norm": 0.6321772933006287,
      "learning_rate": 4.08326758511865e-05,
      "loss": 0.2378,
      "step": 19500
    },
    {
      "epoch": 2.396899201286437,
      "grad_norm": 0.6109116077423096,
      "learning_rate": 4.007404260484312e-05,
      "loss": 0.2374,
      "step": 19750
    },
    {
      "epoch": 2.427239697505253,
      "grad_norm": 0.4645892083644867,
      "learning_rate": 3.9315409358499727e-05,
      "loss": 0.2365,
      "step": 20000
    },
    {
      "epoch": 2.4575801937240684,
      "grad_norm": 0.5845937728881836,
      "learning_rate": 3.855677611215634e-05,
      "loss": 0.2396,
      "step": 20250
    },
    {
      "epoch": 2.487920689942884,
      "grad_norm": 0.6609899401664734,
      "learning_rate": 3.779814286581295e-05,
      "loss": 0.2358,
      "step": 20500
    },
    {
      "epoch": 2.5182611861616997,
      "grad_norm": 0.6136410236358643,
      "learning_rate": 3.7039509619469565e-05,
      "loss": 0.2367,
      "step": 20750
    },
    {
      "epoch": 2.5486016823805153,
      "grad_norm": 0.6023163795471191,
      "learning_rate": 3.628087637312618e-05,
      "loss": 0.2342,
      "step": 21000
    },
    {
      "epoch": 2.578942178599331,
      "grad_norm": 0.5570552349090576,
      "learning_rate": 3.552224312678279e-05,
      "loss": 0.2368,
      "step": 21250
    },
    {
      "epoch": 2.6092826748181466,
      "grad_norm": 0.5860863327980042,
      "learning_rate": 3.47636098804394e-05,
      "loss": 0.2345,
      "step": 21500
    },
    {
      "epoch": 2.639623171036962,
      "grad_norm": 0.6390525698661804,
      "learning_rate": 3.400497663409601e-05,
      "loss": 0.2365,
      "step": 21750
    },
    {
      "epoch": 2.669963667255778,
      "grad_norm": 0.6538860201835632,
      "learning_rate": 3.3246343387752626e-05,
      "loss": 0.2335,
      "step": 22000
    },
    {
      "epoch": 2.700304163474594,
      "grad_norm": 0.5609804391860962,
      "learning_rate": 3.248771014140924e-05,
      "loss": 0.2366,
      "step": 22250
    },
    {
      "epoch": 2.730644659693409,
      "grad_norm": 0.5518357753753662,
      "learning_rate": 3.172907689506585e-05,
      "loss": 0.2335,
      "step": 22500
    },
    {
      "epoch": 2.760985155912225,
      "grad_norm": 0.6421113014221191,
      "learning_rate": 3.0970443648722465e-05,
      "loss": 0.2348,
      "step": 22750
    },
    {
      "epoch": 2.7913256521310403,
      "grad_norm": 0.6312738656997681,
      "learning_rate": 3.0211810402379076e-05,
      "loss": 0.2324,
      "step": 23000
    },
    {
      "epoch": 2.8216661483498564,
      "grad_norm": 0.6342710256576538,
      "learning_rate": 2.9453177156035688e-05,
      "loss": 0.2291,
      "step": 23250
    },
    {
      "epoch": 2.852006644568672,
      "grad_norm": 0.5744002461433411,
      "learning_rate": 2.86945439096923e-05,
      "loss": 0.2318,
      "step": 23500
    },
    {
      "epoch": 2.8823471407874877,
      "grad_norm": 0.6222126483917236,
      "learning_rate": 2.7935910663348915e-05,
      "loss": 0.2319,
      "step": 23750
    },
    {
      "epoch": 2.9126876370063033,
      "grad_norm": 0.5072076916694641,
      "learning_rate": 2.7177277417005526e-05,
      "loss": 0.2288,
      "step": 24000
    },
    {
      "epoch": 2.943028133225119,
      "grad_norm": 0.6769903898239136,
      "learning_rate": 2.6418644170662138e-05,
      "loss": 0.2296,
      "step": 24250
    },
    {
      "epoch": 2.9733686294439345,
      "grad_norm": 0.6178023815155029,
      "learning_rate": 2.566001092431875e-05,
      "loss": 0.228,
      "step": 24500
    },
    {
      "epoch": 3.00370912566275,
      "grad_norm": 0.623375654220581,
      "learning_rate": 2.490137767797536e-05,
      "loss": 0.2271,
      "step": 24750
    },
    {
      "epoch": 3.034049621881566,
      "grad_norm": 0.5049629211425781,
      "learning_rate": 2.4142744431631973e-05,
      "loss": 0.2177,
      "step": 25000
    },
    {
      "epoch": 3.0643901181003814,
      "grad_norm": 0.5621640682220459,
      "learning_rate": 2.3384111185288585e-05,
      "loss": 0.2203,
      "step": 25250
    },
    {
      "epoch": 3.094730614319197,
      "grad_norm": 0.6677132844924927,
      "learning_rate": 2.2625477938945196e-05,
      "loss": 0.2188,
      "step": 25500
    },
    {
      "epoch": 3.1250711105380127,
      "grad_norm": 0.6030067801475525,
      "learning_rate": 2.1866844692601808e-05,
      "loss": 0.2197,
      "step": 25750
    },
    {
      "epoch": 3.1554116067568283,
      "grad_norm": 0.6289698481559753,
      "learning_rate": 2.1108211446258423e-05,
      "loss": 0.2206,
      "step": 26000
    },
    {
      "epoch": 3.1857521029756444,
      "grad_norm": 0.650068461894989,
      "learning_rate": 2.0349578199915035e-05,
      "loss": 0.2193,
      "step": 26250
    },
    {
      "epoch": 3.21609259919446,
      "grad_norm": 0.6510699987411499,
      "learning_rate": 1.9590944953571646e-05,
      "loss": 0.218,
      "step": 26500
    },
    {
      "epoch": 3.2464330954132756,
      "grad_norm": 0.6897627115249634,
      "learning_rate": 1.8832311707228258e-05,
      "loss": 0.2172,
      "step": 26750
    },
    {
      "epoch": 3.2767735916320913,
      "grad_norm": 0.6440379023551941,
      "learning_rate": 1.8073678460884873e-05,
      "loss": 0.2157,
      "step": 27000
    },
    {
      "epoch": 3.307114087850907,
      "grad_norm": 0.6011075973510742,
      "learning_rate": 1.7315045214541485e-05,
      "loss": 0.2159,
      "step": 27250
    },
    {
      "epoch": 3.3374545840697225,
      "grad_norm": 0.6770527362823486,
      "learning_rate": 1.6556411968198096e-05,
      "loss": 0.2178,
      "step": 27500
    },
    {
      "epoch": 3.367795080288538,
      "grad_norm": 0.5674268007278442,
      "learning_rate": 1.5797778721854708e-05,
      "loss": 0.2155,
      "step": 27750
    },
    {
      "epoch": 3.3981355765073538,
      "grad_norm": 0.6782290935516357,
      "learning_rate": 1.5039145475511321e-05,
      "loss": 0.2165,
      "step": 28000
    },
    {
      "epoch": 3.4284760727261694,
      "grad_norm": 0.6378525495529175,
      "learning_rate": 1.4280512229167931e-05,
      "loss": 0.2165,
      "step": 28250
    },
    {
      "epoch": 3.458816568944985,
      "grad_norm": 0.6417750716209412,
      "learning_rate": 1.3521878982824543e-05,
      "loss": 0.2154,
      "step": 28500
    },
    {
      "epoch": 3.4891570651638006,
      "grad_norm": 0.733974039554596,
      "learning_rate": 1.2763245736481154e-05,
      "loss": 0.2145,
      "step": 28750
    },
    {
      "epoch": 3.5194975613826163,
      "grad_norm": 0.6935612559318542,
      "learning_rate": 1.200461249013777e-05,
      "loss": 0.2126,
      "step": 29000
    },
    {
      "epoch": 3.5498380576014323,
      "grad_norm": 0.6449461579322815,
      "learning_rate": 1.124597924379438e-05,
      "loss": 0.2133,
      "step": 29250
    },
    {
      "epoch": 3.5801785538202475,
      "grad_norm": 0.630962610244751,
      "learning_rate": 1.0487345997450993e-05,
      "loss": 0.2137,
      "step": 29500
    },
    {
      "epoch": 3.6105190500390636,
      "grad_norm": 0.6320120692253113,
      "learning_rate": 9.728712751107604e-06,
      "loss": 0.2151,
      "step": 29750
    },
    {
      "epoch": 3.6408595462578788,
      "grad_norm": 0.6066524386405945,
      "learning_rate": 8.970079504764218e-06,
      "loss": 0.2132,
      "step": 30000
    },
    {
      "epoch": 3.671200042476695,
      "grad_norm": 0.707771897315979,
      "learning_rate": 8.21144625842083e-06,
      "loss": 0.2122,
      "step": 30250
    },
    {
      "epoch": 3.7015405386955105,
      "grad_norm": 0.6526840925216675,
      "learning_rate": 7.452813012077442e-06,
      "loss": 0.2107,
      "step": 30500
    },
    {
      "epoch": 3.731881034914326,
      "grad_norm": 0.7959334254264832,
      "learning_rate": 6.694179765734054e-06,
      "loss": 0.2119,
      "step": 30750
    },
    {
      "epoch": 3.7622215311331417,
      "grad_norm": 0.6414825320243835,
      "learning_rate": 5.935546519390666e-06,
      "loss": 0.2092,
      "step": 31000
    },
    {
      "epoch": 3.7925620273519574,
      "grad_norm": 0.6206453442573547,
      "learning_rate": 5.1769132730472785e-06,
      "loss": 0.2105,
      "step": 31250
    },
    {
      "epoch": 3.822902523570773,
      "grad_norm": 0.6464530825614929,
      "learning_rate": 4.418280026703891e-06,
      "loss": 0.2109,
      "step": 31500
    },
    {
      "epoch": 3.8532430197895886,
      "grad_norm": 0.6245775818824768,
      "learning_rate": 3.6596467803605027e-06,
      "loss": 0.2108,
      "step": 31750
    },
    {
      "epoch": 3.8835835160084042,
      "grad_norm": 0.6799646615982056,
      "learning_rate": 2.9010135340171147e-06,
      "loss": 0.2116,
      "step": 32000
    },
    {
      "epoch": 3.91392401222722,
      "grad_norm": 0.6648467183113098,
      "learning_rate": 2.1423802876737272e-06,
      "loss": 0.2108,
      "step": 32250
    },
    {
      "epoch": 3.9442645084460355,
      "grad_norm": 0.6555809378623962,
      "learning_rate": 1.3837470413303393e-06,
      "loss": 0.2117,
      "step": 32500
    },
    {
      "epoch": 3.974605004664851,
      "grad_norm": 0.5603693127632141,
      "learning_rate": 6.251137949869516e-07,
      "loss": 0.209,
      "step": 32750
    },
    {
      "epoch": 3.9996055735491556,
      "step": 32956,
      "total_flos": 5.234804195348718e+18,
      "train_loss": 0.2934305334803057,
      "train_runtime": 64350.2603,
      "train_samples_per_second": 32.78,
      "train_steps_per_second": 0.512
    }
  ],
  "logging_steps": 250,
  "max_steps": 32956,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.234804195348718e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|