{ "best_metric": null, "best_model_checkpoint": null, "epoch": 2.998678996036988, "eval_steps": 500, "global_step": 5676, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.005284015852047556, "grad_norm": 23.222759246826172, "learning_rate": 9.982381959126146e-05, "loss": 10.4796, "step": 10 }, { "epoch": 0.010568031704095112, "grad_norm": 81.25479125976562, "learning_rate": 9.964763918252291e-05, "loss": 9.4214, "step": 20 }, { "epoch": 0.015852047556142668, "grad_norm": 168.46482849121094, "learning_rate": 9.947145877378437e-05, "loss": 7.1415, "step": 30 }, { "epoch": 0.021136063408190225, "grad_norm": 24.370380401611328, "learning_rate": 9.929527836504581e-05, "loss": 7.6736, "step": 40 }, { "epoch": 0.026420079260237782, "grad_norm": 29.28400993347168, "learning_rate": 9.911909795630726e-05, "loss": 6.7861, "step": 50 }, { "epoch": 0.031704095112285335, "grad_norm": 22.963659286499023, "learning_rate": 9.894291754756872e-05, "loss": 7.6953, "step": 60 }, { "epoch": 0.036988110964332896, "grad_norm": 24.067852020263672, "learning_rate": 9.876673713883017e-05, "loss": 7.1323, "step": 70 }, { "epoch": 0.04227212681638045, "grad_norm": 27.10059928894043, "learning_rate": 9.859055673009162e-05, "loss": 6.7526, "step": 80 }, { "epoch": 0.047556142668428, "grad_norm": 24.723169326782227, "learning_rate": 9.841437632135308e-05, "loss": 6.8244, "step": 90 }, { "epoch": 0.052840158520475564, "grad_norm": 17.904335021972656, "learning_rate": 9.823819591261452e-05, "loss": 6.2243, "step": 100 }, { "epoch": 0.05812417437252312, "grad_norm": 28.342897415161133, "learning_rate": 9.806201550387597e-05, "loss": 6.6306, "step": 110 }, { "epoch": 0.06340819022457067, "grad_norm": 18.443471908569336, "learning_rate": 9.788583509513743e-05, "loss": 6.3406, "step": 120 }, { "epoch": 0.06869220607661823, "grad_norm": 22.697484970092773, "learning_rate": 9.770965468639888e-05, "loss": 6.5713, "step": 130 }, { "epoch": 0.07397622192866579, "grad_norm": 35.90569305419922, "learning_rate": 9.753347427766033e-05, "loss": 6.8458, "step": 140 }, { "epoch": 0.07926023778071334, "grad_norm": 18.29481315612793, "learning_rate": 9.735729386892179e-05, "loss": 6.7696, "step": 150 }, { "epoch": 0.0845442536327609, "grad_norm": 30.811386108398438, "learning_rate": 9.718111346018324e-05, "loss": 6.9786, "step": 160 }, { "epoch": 0.08982826948480846, "grad_norm": 16.986886978149414, "learning_rate": 9.700493305144468e-05, "loss": 7.6117, "step": 170 }, { "epoch": 0.095112285336856, "grad_norm": 19.391592025756836, "learning_rate": 9.682875264270614e-05, "loss": 5.9932, "step": 180 }, { "epoch": 0.10039630118890357, "grad_norm": 18.568567276000977, "learning_rate": 9.665257223396759e-05, "loss": 6.1379, "step": 190 }, { "epoch": 0.10568031704095113, "grad_norm": 17.993305206298828, "learning_rate": 9.647639182522904e-05, "loss": 5.706, "step": 200 }, { "epoch": 0.11096433289299867, "grad_norm": 24.294239044189453, "learning_rate": 9.63002114164905e-05, "loss": 6.4557, "step": 210 }, { "epoch": 0.11624834874504623, "grad_norm": 23.3104190826416, "learning_rate": 9.612403100775195e-05, "loss": 6.2431, "step": 220 }, { "epoch": 0.1215323645970938, "grad_norm": 15.693617820739746, "learning_rate": 9.594785059901339e-05, "loss": 6.0987, "step": 230 }, { "epoch": 0.12681638044914134, "grad_norm": 15.591407775878906, "learning_rate": 9.577167019027485e-05, "loss": 5.6491, "step": 240 }, { "epoch": 0.13210039630118892, "grad_norm": 39.194488525390625, 
"learning_rate": 9.55954897815363e-05, "loss": 6.3163, "step": 250 }, { "epoch": 0.13738441215323646, "grad_norm": 17.902145385742188, "learning_rate": 9.541930937279775e-05, "loss": 6.6775, "step": 260 }, { "epoch": 0.142668428005284, "grad_norm": 19.417524337768555, "learning_rate": 9.524312896405921e-05, "loss": 6.1561, "step": 270 }, { "epoch": 0.14795244385733158, "grad_norm": 13.955491065979004, "learning_rate": 9.506694855532066e-05, "loss": 6.0323, "step": 280 }, { "epoch": 0.15323645970937913, "grad_norm": 15.010894775390625, "learning_rate": 9.48907681465821e-05, "loss": 6.7402, "step": 290 }, { "epoch": 0.15852047556142668, "grad_norm": 13.98841667175293, "learning_rate": 9.471458773784356e-05, "loss": 5.8278, "step": 300 }, { "epoch": 0.16380449141347425, "grad_norm": 31.17317008972168, "learning_rate": 9.453840732910501e-05, "loss": 7.4579, "step": 310 }, { "epoch": 0.1690885072655218, "grad_norm": 40.382537841796875, "learning_rate": 9.436222692036646e-05, "loss": 6.7915, "step": 320 }, { "epoch": 0.17437252311756934, "grad_norm": 17.096555709838867, "learning_rate": 9.418604651162792e-05, "loss": 5.9039, "step": 330 }, { "epoch": 0.17965653896961692, "grad_norm": 18.923612594604492, "learning_rate": 9.400986610288936e-05, "loss": 5.9329, "step": 340 }, { "epoch": 0.18494055482166447, "grad_norm": 26.14301300048828, "learning_rate": 9.383368569415081e-05, "loss": 6.2683, "step": 350 }, { "epoch": 0.190224570673712, "grad_norm": 16.683794021606445, "learning_rate": 9.365750528541227e-05, "loss": 7.1923, "step": 360 }, { "epoch": 0.1955085865257596, "grad_norm": 13.9892578125, "learning_rate": 9.348132487667372e-05, "loss": 6.2097, "step": 370 }, { "epoch": 0.20079260237780713, "grad_norm": 14.954909324645996, "learning_rate": 9.330514446793516e-05, "loss": 6.1171, "step": 380 }, { "epoch": 0.20607661822985468, "grad_norm": 23.484577178955078, "learning_rate": 9.312896405919662e-05, "loss": 6.2121, "step": 390 }, { "epoch": 0.21136063408190225, "grad_norm": 13.161015510559082, "learning_rate": 9.295278365045807e-05, "loss": 6.2938, "step": 400 }, { "epoch": 0.2166446499339498, "grad_norm": 19.971187591552734, "learning_rate": 9.277660324171952e-05, "loss": 5.8581, "step": 410 }, { "epoch": 0.22192866578599735, "grad_norm": 13.238492965698242, "learning_rate": 9.260042283298098e-05, "loss": 6.0769, "step": 420 }, { "epoch": 0.22721268163804492, "grad_norm": 17.053558349609375, "learning_rate": 9.242424242424242e-05, "loss": 6.5007, "step": 430 }, { "epoch": 0.23249669749009247, "grad_norm": 27.834871292114258, "learning_rate": 9.224806201550387e-05, "loss": 5.9163, "step": 440 }, { "epoch": 0.23778071334214002, "grad_norm": 36.51383972167969, "learning_rate": 9.207188160676533e-05, "loss": 6.7323, "step": 450 }, { "epoch": 0.2430647291941876, "grad_norm": 17.362863540649414, "learning_rate": 9.189570119802678e-05, "loss": 6.534, "step": 460 }, { "epoch": 0.24834874504623514, "grad_norm": 16.064483642578125, "learning_rate": 9.171952078928823e-05, "loss": 6.4153, "step": 470 }, { "epoch": 0.2536327608982827, "grad_norm": 18.7987003326416, "learning_rate": 9.154334038054969e-05, "loss": 5.5386, "step": 480 }, { "epoch": 0.25891677675033026, "grad_norm": 21.468870162963867, "learning_rate": 9.136715997181113e-05, "loss": 6.3368, "step": 490 }, { "epoch": 0.26420079260237783, "grad_norm": 15.937960624694824, "learning_rate": 9.119097956307258e-05, "loss": 5.5324, "step": 500 }, { "epoch": 0.26948480845442535, "grad_norm": 19.185712814331055, "learning_rate": 9.101479915433404e-05, 
"loss": 6.086, "step": 510 }, { "epoch": 0.2747688243064729, "grad_norm": 22.056148529052734, "learning_rate": 9.083861874559549e-05, "loss": 5.6768, "step": 520 }, { "epoch": 0.2800528401585205, "grad_norm": 28.70425796508789, "learning_rate": 9.066243833685694e-05, "loss": 5.8879, "step": 530 }, { "epoch": 0.285336856010568, "grad_norm": 15.535354614257812, "learning_rate": 9.04862579281184e-05, "loss": 7.4113, "step": 540 }, { "epoch": 0.2906208718626156, "grad_norm": 11.174695014953613, "learning_rate": 9.031007751937985e-05, "loss": 5.4265, "step": 550 }, { "epoch": 0.29590488771466317, "grad_norm": 17.119312286376953, "learning_rate": 9.013389711064129e-05, "loss": 5.7025, "step": 560 }, { "epoch": 0.3011889035667107, "grad_norm": 6.16047477722168, "learning_rate": 8.995771670190275e-05, "loss": 6.3164, "step": 570 }, { "epoch": 0.30647291941875826, "grad_norm": 57.1472053527832, "learning_rate": 8.97815362931642e-05, "loss": 6.2712, "step": 580 }, { "epoch": 0.31175693527080584, "grad_norm": 13.304646492004395, "learning_rate": 8.960535588442566e-05, "loss": 6.3801, "step": 590 }, { "epoch": 0.31704095112285335, "grad_norm": 62.073909759521484, "learning_rate": 8.942917547568711e-05, "loss": 5.9617, "step": 600 }, { "epoch": 0.32232496697490093, "grad_norm": 22.83144760131836, "learning_rate": 8.925299506694856e-05, "loss": 6.4754, "step": 610 }, { "epoch": 0.3276089828269485, "grad_norm": 11.822307586669922, "learning_rate": 8.907681465821e-05, "loss": 6.1844, "step": 620 }, { "epoch": 0.332892998678996, "grad_norm": 11.841323852539062, "learning_rate": 8.890063424947146e-05, "loss": 6.461, "step": 630 }, { "epoch": 0.3381770145310436, "grad_norm": 15.562223434448242, "learning_rate": 8.872445384073291e-05, "loss": 5.1181, "step": 640 }, { "epoch": 0.34346103038309117, "grad_norm": 20.331464767456055, "learning_rate": 8.854827343199437e-05, "loss": 5.6913, "step": 650 }, { "epoch": 0.3487450462351387, "grad_norm": 16.921146392822266, "learning_rate": 8.837209302325582e-05, "loss": 6.1924, "step": 660 }, { "epoch": 0.35402906208718626, "grad_norm": 20.378314971923828, "learning_rate": 8.819591261451727e-05, "loss": 6.1141, "step": 670 }, { "epoch": 0.35931307793923384, "grad_norm": 17.5389347076416, "learning_rate": 8.801973220577873e-05, "loss": 6.6286, "step": 680 }, { "epoch": 0.36459709379128136, "grad_norm": 18.35860252380371, "learning_rate": 8.784355179704017e-05, "loss": 5.3558, "step": 690 }, { "epoch": 0.36988110964332893, "grad_norm": 15.507783889770508, "learning_rate": 8.766737138830162e-05, "loss": 5.7434, "step": 700 }, { "epoch": 0.3751651254953765, "grad_norm": 15.035051345825195, "learning_rate": 8.749119097956308e-05, "loss": 5.3639, "step": 710 }, { "epoch": 0.380449141347424, "grad_norm": 16.45562744140625, "learning_rate": 8.731501057082453e-05, "loss": 5.8224, "step": 720 }, { "epoch": 0.3857331571994716, "grad_norm": 18.149444580078125, "learning_rate": 8.713883016208598e-05, "loss": 5.6189, "step": 730 }, { "epoch": 0.3910171730515192, "grad_norm": 14.932753562927246, "learning_rate": 8.696264975334744e-05, "loss": 6.0709, "step": 740 }, { "epoch": 0.3963011889035667, "grad_norm": 20.040613174438477, "learning_rate": 8.678646934460888e-05, "loss": 6.0672, "step": 750 }, { "epoch": 0.40158520475561427, "grad_norm": 17.960493087768555, "learning_rate": 8.661028893587033e-05, "loss": 6.0258, "step": 760 }, { "epoch": 0.40686922060766184, "grad_norm": 18.428001403808594, "learning_rate": 8.643410852713179e-05, "loss": 5.6467, "step": 770 }, { "epoch": 
0.41215323645970936, "grad_norm": 44.34891891479492, "learning_rate": 8.625792811839324e-05, "loss": 6.5706, "step": 780 }, { "epoch": 0.41743725231175693, "grad_norm": 19.27741050720215, "learning_rate": 8.60817477096547e-05, "loss": 5.7313, "step": 790 }, { "epoch": 0.4227212681638045, "grad_norm": 17.21041488647461, "learning_rate": 8.590556730091615e-05, "loss": 5.2473, "step": 800 }, { "epoch": 0.42800528401585203, "grad_norm": 34.164329528808594, "learning_rate": 8.57293868921776e-05, "loss": 6.0723, "step": 810 }, { "epoch": 0.4332892998678996, "grad_norm": 19.111547470092773, "learning_rate": 8.555320648343904e-05, "loss": 5.7107, "step": 820 }, { "epoch": 0.4385733157199472, "grad_norm": 9.246485710144043, "learning_rate": 8.53770260747005e-05, "loss": 6.2144, "step": 830 }, { "epoch": 0.4438573315719947, "grad_norm": 11.372208595275879, "learning_rate": 8.520084566596195e-05, "loss": 6.0718, "step": 840 }, { "epoch": 0.44914134742404227, "grad_norm": 16.53830337524414, "learning_rate": 8.50246652572234e-05, "loss": 5.2095, "step": 850 }, { "epoch": 0.45442536327608984, "grad_norm": 15.624394416809082, "learning_rate": 8.484848484848486e-05, "loss": 5.9683, "step": 860 }, { "epoch": 0.45970937912813736, "grad_norm": 31.17705726623535, "learning_rate": 8.467230443974631e-05, "loss": 6.6641, "step": 870 }, { "epoch": 0.46499339498018494, "grad_norm": 15.525871276855469, "learning_rate": 8.449612403100775e-05, "loss": 6.1813, "step": 880 }, { "epoch": 0.4702774108322325, "grad_norm": 25.59772300720215, "learning_rate": 8.431994362226921e-05, "loss": 5.6976, "step": 890 }, { "epoch": 0.47556142668428003, "grad_norm": 19.93359375, "learning_rate": 8.414376321353066e-05, "loss": 6.2014, "step": 900 }, { "epoch": 0.4808454425363276, "grad_norm": 36.79905319213867, "learning_rate": 8.396758280479212e-05, "loss": 6.3585, "step": 910 }, { "epoch": 0.4861294583883752, "grad_norm": 14.733572959899902, "learning_rate": 8.379140239605357e-05, "loss": 6.7961, "step": 920 }, { "epoch": 0.4914134742404227, "grad_norm": 20.451168060302734, "learning_rate": 8.361522198731502e-05, "loss": 5.9698, "step": 930 }, { "epoch": 0.4966974900924703, "grad_norm": 21.818988800048828, "learning_rate": 8.343904157857648e-05, "loss": 5.6348, "step": 940 }, { "epoch": 0.5019815059445178, "grad_norm": 17.34412384033203, "learning_rate": 8.326286116983792e-05, "loss": 6.4502, "step": 950 }, { "epoch": 0.5072655217965654, "grad_norm": 17.00845718383789, "learning_rate": 8.308668076109937e-05, "loss": 5.9756, "step": 960 }, { "epoch": 0.512549537648613, "grad_norm": 17.681501388549805, "learning_rate": 8.291050035236083e-05, "loss": 5.1764, "step": 970 }, { "epoch": 0.5178335535006605, "grad_norm": 19.49733543395996, "learning_rate": 8.273431994362228e-05, "loss": 5.5531, "step": 980 }, { "epoch": 0.523117569352708, "grad_norm": 13.726716041564941, "learning_rate": 8.255813953488373e-05, "loss": 5.5033, "step": 990 }, { "epoch": 0.5284015852047557, "grad_norm": 13.990522384643555, "learning_rate": 8.238195912614517e-05, "loss": 5.5378, "step": 1000 }, { "epoch": 0.5336856010568032, "grad_norm": 12.668313980102539, "learning_rate": 8.220577871740663e-05, "loss": 6.2686, "step": 1010 }, { "epoch": 0.5389696169088507, "grad_norm": 19.316059112548828, "learning_rate": 8.202959830866808e-05, "loss": 6.0995, "step": 1020 }, { "epoch": 0.5442536327608983, "grad_norm": 6.63709831237793, "learning_rate": 8.185341789992954e-05, "loss": 5.2439, "step": 1030 }, { "epoch": 0.5495376486129459, "grad_norm": 19.178232192993164, 
"learning_rate": 8.167723749119099e-05, "loss": 6.4902, "step": 1040 }, { "epoch": 0.5548216644649934, "grad_norm": 20.179941177368164, "learning_rate": 8.150105708245243e-05, "loss": 6.3493, "step": 1050 }, { "epoch": 0.560105680317041, "grad_norm": 13.556324005126953, "learning_rate": 8.132487667371388e-05, "loss": 6.1359, "step": 1060 }, { "epoch": 0.5653896961690885, "grad_norm": 15.687406539916992, "learning_rate": 8.114869626497534e-05, "loss": 5.6453, "step": 1070 }, { "epoch": 0.570673712021136, "grad_norm": 21.37010383605957, "learning_rate": 8.097251585623679e-05, "loss": 6.5172, "step": 1080 }, { "epoch": 0.5759577278731837, "grad_norm": 19.941261291503906, "learning_rate": 8.079633544749823e-05, "loss": 6.2596, "step": 1090 }, { "epoch": 0.5812417437252312, "grad_norm": 15.84200668334961, "learning_rate": 8.062015503875969e-05, "loss": 5.1499, "step": 1100 }, { "epoch": 0.5865257595772787, "grad_norm": 16.59484100341797, "learning_rate": 8.044397463002114e-05, "loss": 5.5245, "step": 1110 }, { "epoch": 0.5918097754293263, "grad_norm": 18.21736717224121, "learning_rate": 8.02677942212826e-05, "loss": 5.8338, "step": 1120 }, { "epoch": 0.5970937912813739, "grad_norm": 23.575891494750977, "learning_rate": 8.009161381254405e-05, "loss": 6.7099, "step": 1130 }, { "epoch": 0.6023778071334214, "grad_norm": 20.20686912536621, "learning_rate": 7.991543340380549e-05, "loss": 6.0041, "step": 1140 }, { "epoch": 0.607661822985469, "grad_norm": 19.05695152282715, "learning_rate": 7.973925299506694e-05, "loss": 5.4691, "step": 1150 }, { "epoch": 0.6129458388375165, "grad_norm": 13.987954139709473, "learning_rate": 7.95630725863284e-05, "loss": 5.5048, "step": 1160 }, { "epoch": 0.618229854689564, "grad_norm": 11.669072151184082, "learning_rate": 7.938689217758985e-05, "loss": 5.5359, "step": 1170 }, { "epoch": 0.6235138705416117, "grad_norm": 19.21446990966797, "learning_rate": 7.92107117688513e-05, "loss": 5.3507, "step": 1180 }, { "epoch": 0.6287978863936592, "grad_norm": 18.533246994018555, "learning_rate": 7.903453136011276e-05, "loss": 6.5164, "step": 1190 }, { "epoch": 0.6340819022457067, "grad_norm": 31.197734832763672, "learning_rate": 7.885835095137421e-05, "loss": 5.2811, "step": 1200 }, { "epoch": 0.6393659180977543, "grad_norm": 17.063016891479492, "learning_rate": 7.868217054263565e-05, "loss": 6.1117, "step": 1210 }, { "epoch": 0.6446499339498019, "grad_norm": 17.40289306640625, "learning_rate": 7.850599013389711e-05, "loss": 5.5627, "step": 1220 }, { "epoch": 0.6499339498018494, "grad_norm": 17.07564926147461, "learning_rate": 7.832980972515856e-05, "loss": 5.2567, "step": 1230 }, { "epoch": 0.655217965653897, "grad_norm": 11.943973541259766, "learning_rate": 7.815362931642002e-05, "loss": 4.986, "step": 1240 }, { "epoch": 0.6605019815059445, "grad_norm": 20.174036026000977, "learning_rate": 7.797744890768147e-05, "loss": 5.1765, "step": 1250 }, { "epoch": 0.665785997357992, "grad_norm": 23.21563148498535, "learning_rate": 7.780126849894292e-05, "loss": 5.572, "step": 1260 }, { "epoch": 0.6710700132100397, "grad_norm": 14.952116966247559, "learning_rate": 7.762508809020436e-05, "loss": 5.1001, "step": 1270 }, { "epoch": 0.6763540290620872, "grad_norm": 13.655055046081543, "learning_rate": 7.744890768146582e-05, "loss": 6.1625, "step": 1280 }, { "epoch": 0.6816380449141347, "grad_norm": 25.46530532836914, "learning_rate": 7.727272727272727e-05, "loss": 5.6293, "step": 1290 }, { "epoch": 0.6869220607661823, "grad_norm": 30.698287963867188, "learning_rate": 7.709654686398873e-05, 
"loss": 6.2168, "step": 1300 }, { "epoch": 0.6922060766182299, "grad_norm": 16.65216827392578, "learning_rate": 7.692036645525018e-05, "loss": 5.6418, "step": 1310 }, { "epoch": 0.6974900924702774, "grad_norm": 28.82203483581543, "learning_rate": 7.674418604651163e-05, "loss": 5.5397, "step": 1320 }, { "epoch": 0.702774108322325, "grad_norm": 20.495399475097656, "learning_rate": 7.656800563777309e-05, "loss": 5.4648, "step": 1330 }, { "epoch": 0.7080581241743725, "grad_norm": 12.824000358581543, "learning_rate": 7.639182522903453e-05, "loss": 5.4236, "step": 1340 }, { "epoch": 0.71334214002642, "grad_norm": 12.849726676940918, "learning_rate": 7.621564482029598e-05, "loss": 5.753, "step": 1350 }, { "epoch": 0.7186261558784677, "grad_norm": 27.25946044921875, "learning_rate": 7.603946441155744e-05, "loss": 5.0487, "step": 1360 }, { "epoch": 0.7239101717305152, "grad_norm": 15.405123710632324, "learning_rate": 7.586328400281889e-05, "loss": 5.7386, "step": 1370 }, { "epoch": 0.7291941875825627, "grad_norm": 10.412837028503418, "learning_rate": 7.568710359408034e-05, "loss": 5.5336, "step": 1380 }, { "epoch": 0.7344782034346103, "grad_norm": 10.340492248535156, "learning_rate": 7.55109231853418e-05, "loss": 5.4025, "step": 1390 }, { "epoch": 0.7397622192866579, "grad_norm": 14.122332572937012, "learning_rate": 7.533474277660324e-05, "loss": 5.8226, "step": 1400 }, { "epoch": 0.7450462351387054, "grad_norm": 16.243783950805664, "learning_rate": 7.515856236786469e-05, "loss": 5.2315, "step": 1410 }, { "epoch": 0.750330250990753, "grad_norm": 41.024452209472656, "learning_rate": 7.498238195912615e-05, "loss": 5.5957, "step": 1420 }, { "epoch": 0.7556142668428005, "grad_norm": 17.766620635986328, "learning_rate": 7.48062015503876e-05, "loss": 6.34, "step": 1430 }, { "epoch": 0.760898282694848, "grad_norm": 58.097171783447266, "learning_rate": 7.463002114164905e-05, "loss": 5.5743, "step": 1440 }, { "epoch": 0.7661822985468957, "grad_norm": 19.869304656982422, "learning_rate": 7.445384073291051e-05, "loss": 6.0175, "step": 1450 }, { "epoch": 0.7714663143989432, "grad_norm": 22.49924087524414, "learning_rate": 7.427766032417196e-05, "loss": 5.9537, "step": 1460 }, { "epoch": 0.7767503302509907, "grad_norm": 17.264482498168945, "learning_rate": 7.41014799154334e-05, "loss": 6.2231, "step": 1470 }, { "epoch": 0.7820343461030383, "grad_norm": 29.350412368774414, "learning_rate": 7.392529950669486e-05, "loss": 6.0145, "step": 1480 }, { "epoch": 0.7873183619550859, "grad_norm": 17.895017623901367, "learning_rate": 7.374911909795631e-05, "loss": 5.5755, "step": 1490 }, { "epoch": 0.7926023778071334, "grad_norm": 12.318058013916016, "learning_rate": 7.357293868921777e-05, "loss": 5.7527, "step": 1500 }, { "epoch": 0.797886393659181, "grad_norm": 22.55769157409668, "learning_rate": 7.339675828047922e-05, "loss": 5.8042, "step": 1510 }, { "epoch": 0.8031704095112285, "grad_norm": 24.99711799621582, "learning_rate": 7.322057787174067e-05, "loss": 6.2934, "step": 1520 }, { "epoch": 0.808454425363276, "grad_norm": 18.1428165435791, "learning_rate": 7.304439746300211e-05, "loss": 5.4993, "step": 1530 }, { "epoch": 0.8137384412153237, "grad_norm": 13.404719352722168, "learning_rate": 7.286821705426357e-05, "loss": 5.8874, "step": 1540 }, { "epoch": 0.8190224570673712, "grad_norm": 30.949617385864258, "learning_rate": 7.269203664552502e-05, "loss": 5.7187, "step": 1550 }, { "epoch": 0.8243064729194187, "grad_norm": 17.71588706970215, "learning_rate": 7.251585623678648e-05, "loss": 5.8959, "step": 1560 }, { 
"epoch": 0.8295904887714664, "grad_norm": 25.201427459716797, "learning_rate": 7.233967582804793e-05, "loss": 5.7043, "step": 1570 }, { "epoch": 0.8348745046235139, "grad_norm": 20.064821243286133, "learning_rate": 7.216349541930938e-05, "loss": 5.7317, "step": 1580 }, { "epoch": 0.8401585204755614, "grad_norm": 11.099620819091797, "learning_rate": 7.198731501057084e-05, "loss": 5.5656, "step": 1590 }, { "epoch": 0.845442536327609, "grad_norm": 33.0284309387207, "learning_rate": 7.181113460183228e-05, "loss": 5.5059, "step": 1600 }, { "epoch": 0.8507265521796565, "grad_norm": 17.941001892089844, "learning_rate": 7.163495419309373e-05, "loss": 5.1968, "step": 1610 }, { "epoch": 0.8560105680317041, "grad_norm": 22.190805435180664, "learning_rate": 7.145877378435519e-05, "loss": 5.5478, "step": 1620 }, { "epoch": 0.8612945838837517, "grad_norm": 33.026039123535156, "learning_rate": 7.128259337561664e-05, "loss": 5.7798, "step": 1630 }, { "epoch": 0.8665785997357992, "grad_norm": 13.09423542022705, "learning_rate": 7.11064129668781e-05, "loss": 5.3289, "step": 1640 }, { "epoch": 0.8718626155878467, "grad_norm": 25.052522659301758, "learning_rate": 7.093023255813955e-05, "loss": 5.4192, "step": 1650 }, { "epoch": 0.8771466314398944, "grad_norm": 13.149133682250977, "learning_rate": 7.075405214940099e-05, "loss": 5.6466, "step": 1660 }, { "epoch": 0.8824306472919419, "grad_norm": 16.968976974487305, "learning_rate": 7.057787174066244e-05, "loss": 5.0864, "step": 1670 }, { "epoch": 0.8877146631439894, "grad_norm": 21.870317459106445, "learning_rate": 7.04016913319239e-05, "loss": 6.7305, "step": 1680 }, { "epoch": 0.892998678996037, "grad_norm": 15.00245475769043, "learning_rate": 7.022551092318535e-05, "loss": 5.8771, "step": 1690 }, { "epoch": 0.8982826948480845, "grad_norm": 8.314766883850098, "learning_rate": 7.00493305144468e-05, "loss": 5.7089, "step": 1700 }, { "epoch": 0.9035667107001321, "grad_norm": 16.795713424682617, "learning_rate": 6.987315010570825e-05, "loss": 6.6234, "step": 1710 }, { "epoch": 0.9088507265521797, "grad_norm": 17.380340576171875, "learning_rate": 6.96969696969697e-05, "loss": 5.5966, "step": 1720 }, { "epoch": 0.9141347424042272, "grad_norm": 17.289960861206055, "learning_rate": 6.952078928823115e-05, "loss": 5.5688, "step": 1730 }, { "epoch": 0.9194187582562747, "grad_norm": 12.64680004119873, "learning_rate": 6.934460887949261e-05, "loss": 5.7903, "step": 1740 }, { "epoch": 0.9247027741083224, "grad_norm": 16.41197395324707, "learning_rate": 6.916842847075405e-05, "loss": 5.3373, "step": 1750 }, { "epoch": 0.9299867899603699, "grad_norm": 12.417819023132324, "learning_rate": 6.89922480620155e-05, "loss": 6.5647, "step": 1760 }, { "epoch": 0.9352708058124174, "grad_norm": 40.8532829284668, "learning_rate": 6.881606765327696e-05, "loss": 5.2265, "step": 1770 }, { "epoch": 0.940554821664465, "grad_norm": 16.69026756286621, "learning_rate": 6.863988724453841e-05, "loss": 6.1758, "step": 1780 }, { "epoch": 0.9458388375165125, "grad_norm": 17.642581939697266, "learning_rate": 6.846370683579986e-05, "loss": 5.3422, "step": 1790 }, { "epoch": 0.9511228533685601, "grad_norm": 30.436647415161133, "learning_rate": 6.82875264270613e-05, "loss": 5.3749, "step": 1800 }, { "epoch": 0.9564068692206077, "grad_norm": 43.051876068115234, "learning_rate": 6.811134601832276e-05, "loss": 6.3454, "step": 1810 }, { "epoch": 0.9616908850726552, "grad_norm": 15.256926536560059, "learning_rate": 6.793516560958421e-05, "loss": 4.6911, "step": 1820 }, { "epoch": 0.9669749009247027, 
"grad_norm": 20.813745498657227, "learning_rate": 6.775898520084567e-05, "loss": 5.2599, "step": 1830 }, { "epoch": 0.9722589167767504, "grad_norm": 15.09667682647705, "learning_rate": 6.758280479210712e-05, "loss": 6.5782, "step": 1840 }, { "epoch": 0.9775429326287979, "grad_norm": 10.933744430541992, "learning_rate": 6.740662438336857e-05, "loss": 5.7958, "step": 1850 }, { "epoch": 0.9828269484808454, "grad_norm": 16.02846336364746, "learning_rate": 6.723044397463001e-05, "loss": 5.6897, "step": 1860 }, { "epoch": 0.988110964332893, "grad_norm": 29.419384002685547, "learning_rate": 6.705426356589147e-05, "loss": 6.2747, "step": 1870 }, { "epoch": 0.9933949801849405, "grad_norm": 17.165990829467773, "learning_rate": 6.687808315715292e-05, "loss": 5.0474, "step": 1880 }, { "epoch": 0.9986789960369881, "grad_norm": 14.135002136230469, "learning_rate": 6.670190274841438e-05, "loss": 5.6133, "step": 1890 }, { "epoch": 1.0036988110964333, "grad_norm": 9.446295738220215, "learning_rate": 6.652572233967583e-05, "loss": 4.5266, "step": 1900 }, { "epoch": 1.0089828269484808, "grad_norm": 13.480342864990234, "learning_rate": 6.634954193093728e-05, "loss": 4.5775, "step": 1910 }, { "epoch": 1.0142668428005284, "grad_norm": 17.177719116210938, "learning_rate": 6.617336152219874e-05, "loss": 4.7144, "step": 1920 }, { "epoch": 1.0195508586525759, "grad_norm": 20.066877365112305, "learning_rate": 6.599718111346018e-05, "loss": 4.1982, "step": 1930 }, { "epoch": 1.0248348745046234, "grad_norm": 26.720029830932617, "learning_rate": 6.582100070472163e-05, "loss": 4.2676, "step": 1940 }, { "epoch": 1.0301188903566711, "grad_norm": 23.155075073242188, "learning_rate": 6.564482029598309e-05, "loss": 4.2762, "step": 1950 }, { "epoch": 1.0354029062087187, "grad_norm": 11.05765151977539, "learning_rate": 6.546863988724454e-05, "loss": 4.4655, "step": 1960 }, { "epoch": 1.0406869220607662, "grad_norm": 19.037799835205078, "learning_rate": 6.5292459478506e-05, "loss": 4.4159, "step": 1970 }, { "epoch": 1.0459709379128137, "grad_norm": 20.42670249938965, "learning_rate": 6.511627906976745e-05, "loss": 4.3828, "step": 1980 }, { "epoch": 1.0512549537648612, "grad_norm": 18.25569725036621, "learning_rate": 6.494009866102889e-05, "loss": 4.7867, "step": 1990 }, { "epoch": 1.0565389696169087, "grad_norm": 8.051241874694824, "learning_rate": 6.476391825229034e-05, "loss": 4.5032, "step": 2000 }, { "epoch": 1.0618229854689565, "grad_norm": 15.53282642364502, "learning_rate": 6.45877378435518e-05, "loss": 3.912, "step": 2010 }, { "epoch": 1.067107001321004, "grad_norm": 17.53829574584961, "learning_rate": 6.441155743481325e-05, "loss": 4.1812, "step": 2020 }, { "epoch": 1.0723910171730515, "grad_norm": 17.04258918762207, "learning_rate": 6.42353770260747e-05, "loss": 4.0792, "step": 2030 }, { "epoch": 1.077675033025099, "grad_norm": 11.775443077087402, "learning_rate": 6.405919661733616e-05, "loss": 4.6554, "step": 2040 }, { "epoch": 1.0829590488771466, "grad_norm": 13.738333702087402, "learning_rate": 6.388301620859761e-05, "loss": 4.1905, "step": 2050 }, { "epoch": 1.0882430647291943, "grad_norm": 15.163809776306152, "learning_rate": 6.370683579985905e-05, "loss": 4.3531, "step": 2060 }, { "epoch": 1.0935270805812418, "grad_norm": 12.641834259033203, "learning_rate": 6.353065539112051e-05, "loss": 4.4543, "step": 2070 }, { "epoch": 1.0988110964332893, "grad_norm": 17.700624465942383, "learning_rate": 6.335447498238196e-05, "loss": 4.544, "step": 2080 }, { "epoch": 1.1040951122853369, "grad_norm": 21.184234619140625, 
"learning_rate": 6.317829457364342e-05, "loss": 4.6108, "step": 2090 }, { "epoch": 1.1093791281373844, "grad_norm": 15.002963066101074, "learning_rate": 6.300211416490487e-05, "loss": 5.3031, "step": 2100 }, { "epoch": 1.114663143989432, "grad_norm": 12.60302734375, "learning_rate": 6.282593375616632e-05, "loss": 4.0069, "step": 2110 }, { "epoch": 1.1199471598414794, "grad_norm": 20.0040225982666, "learning_rate": 6.264975334742776e-05, "loss": 5.0447, "step": 2120 }, { "epoch": 1.1252311756935272, "grad_norm": 29.347444534301758, "learning_rate": 6.247357293868922e-05, "loss": 5.0402, "step": 2130 }, { "epoch": 1.1305151915455747, "grad_norm": 13.593704223632812, "learning_rate": 6.229739252995067e-05, "loss": 4.1444, "step": 2140 }, { "epoch": 1.1357992073976222, "grad_norm": 15.253348350524902, "learning_rate": 6.212121212121213e-05, "loss": 4.2521, "step": 2150 }, { "epoch": 1.1410832232496697, "grad_norm": 19.010047912597656, "learning_rate": 6.194503171247358e-05, "loss": 4.3603, "step": 2160 }, { "epoch": 1.1463672391017172, "grad_norm": 21.425832748413086, "learning_rate": 6.176885130373503e-05, "loss": 4.5611, "step": 2170 }, { "epoch": 1.151651254953765, "grad_norm": 13.638286590576172, "learning_rate": 6.159267089499649e-05, "loss": 4.488, "step": 2180 }, { "epoch": 1.1569352708058125, "grad_norm": 11.201618194580078, "learning_rate": 6.141649048625793e-05, "loss": 4.4993, "step": 2190 }, { "epoch": 1.16221928665786, "grad_norm": 10.205597877502441, "learning_rate": 6.124031007751938e-05, "loss": 4.4236, "step": 2200 }, { "epoch": 1.1675033025099075, "grad_norm": 11.957306861877441, "learning_rate": 6.106412966878084e-05, "loss": 4.7035, "step": 2210 }, { "epoch": 1.172787318361955, "grad_norm": 17.150300979614258, "learning_rate": 6.088794926004229e-05, "loss": 4.6598, "step": 2220 }, { "epoch": 1.1780713342140026, "grad_norm": 11.388029098510742, "learning_rate": 6.071176885130374e-05, "loss": 3.9603, "step": 2230 }, { "epoch": 1.18335535006605, "grad_norm": 14.71832275390625, "learning_rate": 6.053558844256519e-05, "loss": 4.329, "step": 2240 }, { "epoch": 1.1886393659180978, "grad_norm": 25.499982833862305, "learning_rate": 6.035940803382664e-05, "loss": 4.8266, "step": 2250 }, { "epoch": 1.1939233817701453, "grad_norm": 24.240766525268555, "learning_rate": 6.018322762508809e-05, "loss": 4.9288, "step": 2260 }, { "epoch": 1.1992073976221929, "grad_norm": 9.924399375915527, "learning_rate": 6.000704721634954e-05, "loss": 4.1202, "step": 2270 }, { "epoch": 1.2044914134742404, "grad_norm": 19.63318634033203, "learning_rate": 5.9830866807610994e-05, "loss": 4.2551, "step": 2280 }, { "epoch": 1.209775429326288, "grad_norm": 23.672225952148438, "learning_rate": 5.965468639887245e-05, "loss": 4.4992, "step": 2290 }, { "epoch": 1.2150594451783356, "grad_norm": 23.38901710510254, "learning_rate": 5.94785059901339e-05, "loss": 4.6141, "step": 2300 }, { "epoch": 1.2203434610303832, "grad_norm": 41.691383361816406, "learning_rate": 5.9302325581395356e-05, "loss": 4.6338, "step": 2310 }, { "epoch": 1.2256274768824307, "grad_norm": 22.216320037841797, "learning_rate": 5.9126145172656797e-05, "loss": 4.6896, "step": 2320 }, { "epoch": 1.2309114927344782, "grad_norm": 16.754959106445312, "learning_rate": 5.894996476391825e-05, "loss": 4.282, "step": 2330 }, { "epoch": 1.2361955085865257, "grad_norm": 14.538607597351074, "learning_rate": 5.8773784355179705e-05, "loss": 4.1492, "step": 2340 }, { "epoch": 1.2414795244385732, "grad_norm": 27.08867835998535, "learning_rate": 
5.859760394644116e-05, "loss": 3.8655, "step": 2350 }, { "epoch": 1.2467635402906208, "grad_norm": 13.51925277709961, "learning_rate": 5.842142353770261e-05, "loss": 4.028, "step": 2360 }, { "epoch": 1.2520475561426685, "grad_norm": 32.462825775146484, "learning_rate": 5.8245243128964067e-05, "loss": 4.7505, "step": 2370 }, { "epoch": 1.257331571994716, "grad_norm": 23.669504165649414, "learning_rate": 5.806906272022551e-05, "loss": 4.7846, "step": 2380 }, { "epoch": 1.2626155878467635, "grad_norm": 18.8544979095459, "learning_rate": 5.789288231148696e-05, "loss": 4.3953, "step": 2390 }, { "epoch": 1.267899603698811, "grad_norm": 23.200498580932617, "learning_rate": 5.7716701902748415e-05, "loss": 4.2397, "step": 2400 }, { "epoch": 1.2731836195508586, "grad_norm": 16.031267166137695, "learning_rate": 5.754052149400987e-05, "loss": 4.8908, "step": 2410 }, { "epoch": 1.2784676354029063, "grad_norm": 23.778419494628906, "learning_rate": 5.736434108527132e-05, "loss": 5.5319, "step": 2420 }, { "epoch": 1.2837516512549538, "grad_norm": 15.773674964904785, "learning_rate": 5.718816067653278e-05, "loss": 4.1609, "step": 2430 }, { "epoch": 1.2890356671070013, "grad_norm": 32.87483596801758, "learning_rate": 5.701198026779423e-05, "loss": 4.6422, "step": 2440 }, { "epoch": 1.2943196829590489, "grad_norm": 18.543075561523438, "learning_rate": 5.683579985905567e-05, "loss": 3.7852, "step": 2450 }, { "epoch": 1.2996036988110964, "grad_norm": 24.7188663482666, "learning_rate": 5.6659619450317125e-05, "loss": 5.2381, "step": 2460 }, { "epoch": 1.3048877146631441, "grad_norm": 12.012070655822754, "learning_rate": 5.648343904157858e-05, "loss": 4.1263, "step": 2470 }, { "epoch": 1.3101717305151914, "grad_norm": 15.271008491516113, "learning_rate": 5.630725863284003e-05, "loss": 4.0491, "step": 2480 }, { "epoch": 1.3154557463672392, "grad_norm": 15.225932121276855, "learning_rate": 5.613107822410149e-05, "loss": 4.5284, "step": 2490 }, { "epoch": 1.3207397622192867, "grad_norm": 30.377145767211914, "learning_rate": 5.595489781536294e-05, "loss": 4.5636, "step": 2500 }, { "epoch": 1.3260237780713342, "grad_norm": 17.262067794799805, "learning_rate": 5.577871740662438e-05, "loss": 5.1397, "step": 2510 }, { "epoch": 1.3313077939233817, "grad_norm": 15.354004859924316, "learning_rate": 5.5602536997885836e-05, "loss": 4.4136, "step": 2520 }, { "epoch": 1.3365918097754292, "grad_norm": 17.31847381591797, "learning_rate": 5.542635658914729e-05, "loss": 4.7305, "step": 2530 }, { "epoch": 1.341875825627477, "grad_norm": 18.43682098388672, "learning_rate": 5.5250176180408744e-05, "loss": 4.8726, "step": 2540 }, { "epoch": 1.3471598414795245, "grad_norm": 12.398991584777832, "learning_rate": 5.50739957716702e-05, "loss": 4.137, "step": 2550 }, { "epoch": 1.352443857331572, "grad_norm": 33.653724670410156, "learning_rate": 5.4897815362931645e-05, "loss": 4.5122, "step": 2560 }, { "epoch": 1.3577278731836195, "grad_norm": 17.029422760009766, "learning_rate": 5.47216349541931e-05, "loss": 4.8336, "step": 2570 }, { "epoch": 1.363011889035667, "grad_norm": 29.74364471435547, "learning_rate": 5.4545454545454546e-05, "loss": 4.9444, "step": 2580 }, { "epoch": 1.3682959048877148, "grad_norm": 32.33039093017578, "learning_rate": 5.4369274136716e-05, "loss": 4.7353, "step": 2590 }, { "epoch": 1.373579920739762, "grad_norm": 17.53461456298828, "learning_rate": 5.4193093727977454e-05, "loss": 4.9798, "step": 2600 }, { "epoch": 1.3788639365918098, "grad_norm": 16.082250595092773, "learning_rate": 5.40169133192389e-05, "loss": 
4.3158, "step": 2610 }, { "epoch": 1.3841479524438574, "grad_norm": 33.302085876464844, "learning_rate": 5.3840732910500355e-05, "loss": 4.4121, "step": 2620 }, { "epoch": 1.3894319682959049, "grad_norm": 26.340059280395508, "learning_rate": 5.366455250176181e-05, "loss": 4.7952, "step": 2630 }, { "epoch": 1.3947159841479524, "grad_norm": 23.386083602905273, "learning_rate": 5.348837209302326e-05, "loss": 4.2726, "step": 2640 }, { "epoch": 1.4, "grad_norm": 18.52869987487793, "learning_rate": 5.3312191684284704e-05, "loss": 4.1329, "step": 2650 }, { "epoch": 1.4052840158520477, "grad_norm": 17.281587600708008, "learning_rate": 5.313601127554616e-05, "loss": 4.8518, "step": 2660 }, { "epoch": 1.4105680317040952, "grad_norm": 14.406935691833496, "learning_rate": 5.295983086680761e-05, "loss": 3.89, "step": 2670 }, { "epoch": 1.4158520475561427, "grad_norm": 24.403560638427734, "learning_rate": 5.2783650458069066e-05, "loss": 4.702, "step": 2680 }, { "epoch": 1.4211360634081902, "grad_norm": 16.73262596130371, "learning_rate": 5.260747004933052e-05, "loss": 4.2663, "step": 2690 }, { "epoch": 1.4264200792602377, "grad_norm": 28.891759872436523, "learning_rate": 5.2431289640591974e-05, "loss": 4.7317, "step": 2700 }, { "epoch": 1.4317040951122855, "grad_norm": 22.920183181762695, "learning_rate": 5.2255109231853414e-05, "loss": 4.1046, "step": 2710 }, { "epoch": 1.4369881109643328, "grad_norm": 15.954876899719238, "learning_rate": 5.207892882311487e-05, "loss": 4.6354, "step": 2720 }, { "epoch": 1.4422721268163805, "grad_norm": 18.74956512451172, "learning_rate": 5.190274841437632e-05, "loss": 4.3077, "step": 2730 }, { "epoch": 1.447556142668428, "grad_norm": 14.520437240600586, "learning_rate": 5.1726568005637776e-05, "loss": 4.1104, "step": 2740 }, { "epoch": 1.4528401585204755, "grad_norm": 19.05140495300293, "learning_rate": 5.155038759689923e-05, "loss": 4.6874, "step": 2750 }, { "epoch": 1.458124174372523, "grad_norm": 14.033745765686035, "learning_rate": 5.1374207188160684e-05, "loss": 4.6927, "step": 2760 }, { "epoch": 1.4634081902245706, "grad_norm": 13.549539566040039, "learning_rate": 5.1198026779422125e-05, "loss": 4.5559, "step": 2770 }, { "epoch": 1.4686922060766183, "grad_norm": 18.799474716186523, "learning_rate": 5.102184637068358e-05, "loss": 3.7364, "step": 2780 }, { "epoch": 1.4739762219286658, "grad_norm": 22.91070556640625, "learning_rate": 5.084566596194503e-05, "loss": 3.8496, "step": 2790 }, { "epoch": 1.4792602377807134, "grad_norm": 20.70958709716797, "learning_rate": 5.066948555320649e-05, "loss": 4.915, "step": 2800 }, { "epoch": 1.4845442536327609, "grad_norm": 14.737801551818848, "learning_rate": 5.049330514446794e-05, "loss": 4.0253, "step": 2810 }, { "epoch": 1.4898282694848084, "grad_norm": 37.05099105834961, "learning_rate": 5.0317124735729395e-05, "loss": 4.5186, "step": 2820 }, { "epoch": 1.4951122853368561, "grad_norm": 13.765185356140137, "learning_rate": 5.014094432699085e-05, "loss": 4.0766, "step": 2830 }, { "epoch": 1.5003963011889034, "grad_norm": 27.430612564086914, "learning_rate": 4.9964763918252296e-05, "loss": 4.1136, "step": 2840 }, { "epoch": 1.5056803170409512, "grad_norm": 23.646238327026367, "learning_rate": 4.978858350951374e-05, "loss": 4.114, "step": 2850 }, { "epoch": 1.5109643328929987, "grad_norm": 21.7750186920166, "learning_rate": 4.96124031007752e-05, "loss": 4.8769, "step": 2860 }, { "epoch": 1.5162483487450462, "grad_norm": 14.568818092346191, "learning_rate": 4.943622269203665e-05, "loss": 4.1769, "step": 2870 }, { "epoch": 
1.521532364597094, "grad_norm": 17.075746536254883, "learning_rate": 4.9260042283298105e-05, "loss": 4.3407, "step": 2880 }, { "epoch": 1.5268163804491413, "grad_norm": 20.66181755065918, "learning_rate": 4.908386187455955e-05, "loss": 5.1013, "step": 2890 }, { "epoch": 1.532100396301189, "grad_norm": 26.05691146850586, "learning_rate": 4.8907681465821006e-05, "loss": 4.7904, "step": 2900 }, { "epoch": 1.5373844121532365, "grad_norm": 32.075687408447266, "learning_rate": 4.873150105708246e-05, "loss": 3.9514, "step": 2910 }, { "epoch": 1.542668428005284, "grad_norm": 13.932994842529297, "learning_rate": 4.855532064834391e-05, "loss": 4.6773, "step": 2920 }, { "epoch": 1.5479524438573316, "grad_norm": 28.909257888793945, "learning_rate": 4.837914023960536e-05, "loss": 4.5672, "step": 2930 }, { "epoch": 1.553236459709379, "grad_norm": 23.652591705322266, "learning_rate": 4.820295983086681e-05, "loss": 4.5208, "step": 2940 }, { "epoch": 1.5585204755614268, "grad_norm": 27.532529830932617, "learning_rate": 4.802677942212826e-05, "loss": 4.7889, "step": 2950 }, { "epoch": 1.563804491413474, "grad_norm": 17.20075035095215, "learning_rate": 4.785059901338971e-05, "loss": 4.3424, "step": 2960 }, { "epoch": 1.5690885072655218, "grad_norm": 16.50057601928711, "learning_rate": 4.7674418604651164e-05, "loss": 4.7089, "step": 2970 }, { "epoch": 1.5743725231175694, "grad_norm": 11.980966567993164, "learning_rate": 4.749823819591261e-05, "loss": 4.2287, "step": 2980 }, { "epoch": 1.5796565389696169, "grad_norm": 16.405160903930664, "learning_rate": 4.7322057787174065e-05, "loss": 4.0863, "step": 2990 }, { "epoch": 1.5849405548216646, "grad_norm": 17.167919158935547, "learning_rate": 4.714587737843552e-05, "loss": 4.3048, "step": 3000 }, { "epoch": 1.590224570673712, "grad_norm": 27.166318893432617, "learning_rate": 4.696969696969697e-05, "loss": 4.8536, "step": 3010 }, { "epoch": 1.5955085865257597, "grad_norm": 14.062549591064453, "learning_rate": 4.679351656095842e-05, "loss": 4.2515, "step": 3020 }, { "epoch": 1.6007926023778072, "grad_norm": 11.004928588867188, "learning_rate": 4.6617336152219874e-05, "loss": 3.8693, "step": 3030 }, { "epoch": 1.6060766182298547, "grad_norm": 12.106989860534668, "learning_rate": 4.644115574348133e-05, "loss": 4.2033, "step": 3040 }, { "epoch": 1.6113606340819022, "grad_norm": 20.106136322021484, "learning_rate": 4.6264975334742776e-05, "loss": 3.9254, "step": 3050 }, { "epoch": 1.6166446499339497, "grad_norm": 28.118345260620117, "learning_rate": 4.608879492600423e-05, "loss": 4.4722, "step": 3060 }, { "epoch": 1.6219286657859975, "grad_norm": 22.239704132080078, "learning_rate": 4.5912614517265684e-05, "loss": 4.5367, "step": 3070 }, { "epoch": 1.6272126816380448, "grad_norm": 20.649337768554688, "learning_rate": 4.573643410852713e-05, "loss": 4.1882, "step": 3080 }, { "epoch": 1.6324966974900925, "grad_norm": 18.089635848999023, "learning_rate": 4.5560253699788585e-05, "loss": 4.8611, "step": 3090 }, { "epoch": 1.63778071334214, "grad_norm": 16.4549503326416, "learning_rate": 4.538407329105004e-05, "loss": 3.9139, "step": 3100 }, { "epoch": 1.6430647291941876, "grad_norm": 12.388816833496094, "learning_rate": 4.5207892882311486e-05, "loss": 4.5639, "step": 3110 }, { "epoch": 1.6483487450462353, "grad_norm": 12.118916511535645, "learning_rate": 4.503171247357294e-05, "loss": 4.816, "step": 3120 }, { "epoch": 1.6536327608982826, "grad_norm": 13.67740249633789, "learning_rate": 4.4855532064834394e-05, "loss": 5.3883, "step": 3130 }, { "epoch": 1.6589167767503303, 
"grad_norm": 42.89795684814453, "learning_rate": 4.467935165609585e-05, "loss": 4.4219, "step": 3140 }, { "epoch": 1.6642007926023779, "grad_norm": 16.99205780029297, "learning_rate": 4.4503171247357295e-05, "loss": 4.3594, "step": 3150 }, { "epoch": 1.6694848084544254, "grad_norm": 22.239952087402344, "learning_rate": 4.432699083861875e-05, "loss": 4.2405, "step": 3160 }, { "epoch": 1.674768824306473, "grad_norm": 15.229833602905273, "learning_rate": 4.41508104298802e-05, "loss": 4.3056, "step": 3170 }, { "epoch": 1.6800528401585204, "grad_norm": 20.852170944213867, "learning_rate": 4.397463002114165e-05, "loss": 3.9653, "step": 3180 }, { "epoch": 1.6853368560105682, "grad_norm": 20.276960372924805, "learning_rate": 4.3798449612403104e-05, "loss": 4.6102, "step": 3190 }, { "epoch": 1.6906208718626154, "grad_norm": 18.84306526184082, "learning_rate": 4.362226920366456e-05, "loss": 4.8976, "step": 3200 }, { "epoch": 1.6959048877146632, "grad_norm": 16.7408390045166, "learning_rate": 4.3446088794926006e-05, "loss": 4.1674, "step": 3210 }, { "epoch": 1.7011889035667107, "grad_norm": 15.583345413208008, "learning_rate": 4.326990838618746e-05, "loss": 4.6511, "step": 3220 }, { "epoch": 1.7064729194187582, "grad_norm": 23.10440444946289, "learning_rate": 4.3093727977448914e-05, "loss": 4.3704, "step": 3230 }, { "epoch": 1.711756935270806, "grad_norm": 15.978655815124512, "learning_rate": 4.291754756871036e-05, "loss": 4.2902, "step": 3240 }, { "epoch": 1.7170409511228533, "grad_norm": 15.317334175109863, "learning_rate": 4.2741367159971815e-05, "loss": 4.7246, "step": 3250 }, { "epoch": 1.722324966974901, "grad_norm": 16.328771591186523, "learning_rate": 4.256518675123327e-05, "loss": 4.499, "step": 3260 }, { "epoch": 1.7276089828269485, "grad_norm": 17.903669357299805, "learning_rate": 4.2389006342494716e-05, "loss": 4.4711, "step": 3270 }, { "epoch": 1.732892998678996, "grad_norm": 31.415544509887695, "learning_rate": 4.221282593375617e-05, "loss": 5.7955, "step": 3280 }, { "epoch": 1.7381770145310436, "grad_norm": 30.652788162231445, "learning_rate": 4.203664552501762e-05, "loss": 5.5553, "step": 3290 }, { "epoch": 1.743461030383091, "grad_norm": 20.82811164855957, "learning_rate": 4.186046511627907e-05, "loss": 4.616, "step": 3300 }, { "epoch": 1.7487450462351388, "grad_norm": 37.25524139404297, "learning_rate": 4.168428470754052e-05, "loss": 4.4218, "step": 3310 }, { "epoch": 1.7540290620871861, "grad_norm": 20.012208938598633, "learning_rate": 4.150810429880197e-05, "loss": 4.2622, "step": 3320 }, { "epoch": 1.7593130779392339, "grad_norm": 15.57703685760498, "learning_rate": 4.1331923890063427e-05, "loss": 4.7804, "step": 3330 }, { "epoch": 1.7645970937912814, "grad_norm": 21.189626693725586, "learning_rate": 4.1155743481324874e-05, "loss": 4.2022, "step": 3340 }, { "epoch": 1.769881109643329, "grad_norm": 16.452083587646484, "learning_rate": 4.097956307258633e-05, "loss": 4.395, "step": 3350 }, { "epoch": 1.7751651254953766, "grad_norm": 24.379024505615234, "learning_rate": 4.080338266384778e-05, "loss": 5.4778, "step": 3360 }, { "epoch": 1.780449141347424, "grad_norm": 16.71044921875, "learning_rate": 4.062720225510923e-05, "loss": 3.7847, "step": 3370 }, { "epoch": 1.7857331571994717, "grad_norm": 20.328062057495117, "learning_rate": 4.045102184637068e-05, "loss": 4.0727, "step": 3380 }, { "epoch": 1.7910171730515192, "grad_norm": 13.745095252990723, "learning_rate": 4.027484143763214e-05, "loss": 4.289, "step": 3390 }, { "epoch": 1.7963011889035667, "grad_norm": 16.017160415649414, 
"learning_rate": 4.009866102889359e-05, "loss": 5.3135, "step": 3400 }, { "epoch": 1.8015852047556142, "grad_norm": 21.178247451782227, "learning_rate": 3.992248062015504e-05, "loss": 3.9491, "step": 3410 }, { "epoch": 1.8068692206076618, "grad_norm": 18.000858306884766, "learning_rate": 3.974630021141649e-05, "loss": 4.7799, "step": 3420 }, { "epoch": 1.8121532364597095, "grad_norm": 19.869834899902344, "learning_rate": 3.9570119802677946e-05, "loss": 4.8011, "step": 3430 }, { "epoch": 1.8174372523117568, "grad_norm": 21.30682373046875, "learning_rate": 3.939393939393939e-05, "loss": 4.6276, "step": 3440 }, { "epoch": 1.8227212681638045, "grad_norm": 12.255293846130371, "learning_rate": 3.921775898520085e-05, "loss": 4.4172, "step": 3450 }, { "epoch": 1.828005284015852, "grad_norm": 21.959030151367188, "learning_rate": 3.90415785764623e-05, "loss": 4.9598, "step": 3460 }, { "epoch": 1.8332892998678996, "grad_norm": 11.829292297363281, "learning_rate": 3.886539816772375e-05, "loss": 4.0288, "step": 3470 }, { "epoch": 1.8385733157199473, "grad_norm": 14.743148803710938, "learning_rate": 3.86892177589852e-05, "loss": 4.5109, "step": 3480 }, { "epoch": 1.8438573315719946, "grad_norm": 21.387439727783203, "learning_rate": 3.8513037350246657e-05, "loss": 4.9476, "step": 3490 }, { "epoch": 1.8491413474240423, "grad_norm": 17.235153198242188, "learning_rate": 3.8336856941508104e-05, "loss": 5.2709, "step": 3500 }, { "epoch": 1.8544253632760899, "grad_norm": 12.631864547729492, "learning_rate": 3.816067653276956e-05, "loss": 4.8131, "step": 3510 }, { "epoch": 1.8597093791281374, "grad_norm": 15.992369651794434, "learning_rate": 3.798449612403101e-05, "loss": 4.3891, "step": 3520 }, { "epoch": 1.864993394980185, "grad_norm": 15.020049095153809, "learning_rate": 3.7808315715292466e-05, "loss": 4.4299, "step": 3530 }, { "epoch": 1.8702774108322324, "grad_norm": 18.64441680908203, "learning_rate": 3.763213530655391e-05, "loss": 4.2511, "step": 3540 }, { "epoch": 1.8755614266842802, "grad_norm": 11.056464195251465, "learning_rate": 3.745595489781537e-05, "loss": 3.9592, "step": 3550 }, { "epoch": 1.8808454425363275, "grad_norm": 18.41510772705078, "learning_rate": 3.727977448907682e-05, "loss": 4.1504, "step": 3560 }, { "epoch": 1.8861294583883752, "grad_norm": 17.51602554321289, "learning_rate": 3.710359408033827e-05, "loss": 4.6766, "step": 3570 }, { "epoch": 1.8914134742404227, "grad_norm": 14.742583274841309, "learning_rate": 3.692741367159972e-05, "loss": 4.6842, "step": 3580 }, { "epoch": 1.8966974900924702, "grad_norm": 24.69353485107422, "learning_rate": 3.6751233262861176e-05, "loss": 4.6108, "step": 3590 }, { "epoch": 1.901981505944518, "grad_norm": 14.009031295776367, "learning_rate": 3.6575052854122623e-05, "loss": 4.3188, "step": 3600 }, { "epoch": 1.9072655217965653, "grad_norm": 32.68735122680664, "learning_rate": 3.639887244538408e-05, "loss": 4.1809, "step": 3610 }, { "epoch": 1.912549537648613, "grad_norm": 11.190203666687012, "learning_rate": 3.6222692036645525e-05, "loss": 3.7711, "step": 3620 }, { "epoch": 1.9178335535006605, "grad_norm": 17.2862606048584, "learning_rate": 3.604651162790698e-05, "loss": 4.8326, "step": 3630 }, { "epoch": 1.923117569352708, "grad_norm": 22.08545684814453, "learning_rate": 3.587033121916843e-05, "loss": 5.2763, "step": 3640 }, { "epoch": 1.9284015852047556, "grad_norm": 17.900863647460938, "learning_rate": 3.569415081042988e-05, "loss": 4.458, "step": 3650 }, { "epoch": 1.933685601056803, "grad_norm": 22.1374568939209, "learning_rate": 
3.5517970401691334e-05, "loss": 4.6441, "step": 3660 }, { "epoch": 1.9389696169088508, "grad_norm": 31.27112579345703, "learning_rate": 3.534178999295278e-05, "loss": 4.8788, "step": 3670 }, { "epoch": 1.9442536327608981, "grad_norm": 17.499189376831055, "learning_rate": 3.5165609584214235e-05, "loss": 4.2856, "step": 3680 }, { "epoch": 1.9495376486129459, "grad_norm": 18.951818466186523, "learning_rate": 3.498942917547569e-05, "loss": 4.3049, "step": 3690 }, { "epoch": 1.9548216644649934, "grad_norm": 11.734273910522461, "learning_rate": 3.4813248766737136e-05, "loss": 5.0906, "step": 3700 }, { "epoch": 1.960105680317041, "grad_norm": 31.93767738342285, "learning_rate": 3.463706835799859e-05, "loss": 4.7646, "step": 3710 }, { "epoch": 1.9653896961690887, "grad_norm": 14.241568565368652, "learning_rate": 3.4460887949260044e-05, "loss": 4.3788, "step": 3720 }, { "epoch": 1.970673712021136, "grad_norm": 16.402860641479492, "learning_rate": 3.428470754052149e-05, "loss": 3.9259, "step": 3730 }, { "epoch": 1.9759577278731837, "grad_norm": 12.232470512390137, "learning_rate": 3.4108527131782945e-05, "loss": 3.9939, "step": 3740 }, { "epoch": 1.9812417437252312, "grad_norm": 28.40672492980957, "learning_rate": 3.39323467230444e-05, "loss": 4.0019, "step": 3750 }, { "epoch": 1.9865257595772787, "grad_norm": 32.095157623291016, "learning_rate": 3.375616631430585e-05, "loss": 4.7874, "step": 3760 }, { "epoch": 1.9918097754293262, "grad_norm": 16.792760848999023, "learning_rate": 3.35799859055673e-05, "loss": 4.0537, "step": 3770 }, { "epoch": 1.9970937912813738, "grad_norm": 16.95918083190918, "learning_rate": 3.3403805496828755e-05, "loss": 4.5087, "step": 3780 }, { "epoch": 2.002113606340819, "grad_norm": 14.665895462036133, "learning_rate": 3.322762508809021e-05, "loss": 4.237, "step": 3790 }, { "epoch": 2.0073976221928667, "grad_norm": 13.65630054473877, "learning_rate": 3.3051444679351656e-05, "loss": 3.0788, "step": 3800 }, { "epoch": 2.012681638044914, "grad_norm": 13.918439865112305, "learning_rate": 3.287526427061311e-05, "loss": 3.4074, "step": 3810 }, { "epoch": 2.0179656538969617, "grad_norm": 14.214478492736816, "learning_rate": 3.2699083861874564e-05, "loss": 3.8812, "step": 3820 }, { "epoch": 2.0232496697490094, "grad_norm": 13.854333877563477, "learning_rate": 3.252290345313601e-05, "loss": 3.1126, "step": 3830 }, { "epoch": 2.0285336856010567, "grad_norm": 24.404911041259766, "learning_rate": 3.2346723044397465e-05, "loss": 3.5378, "step": 3840 }, { "epoch": 2.0338177014531045, "grad_norm": 20.025548934936523, "learning_rate": 3.217054263565892e-05, "loss": 3.6253, "step": 3850 }, { "epoch": 2.0391017173051518, "grad_norm": 20.179168701171875, "learning_rate": 3.1994362226920366e-05, "loss": 3.5816, "step": 3860 }, { "epoch": 2.0443857331571995, "grad_norm": 18.37339973449707, "learning_rate": 3.181818181818182e-05, "loss": 3.8492, "step": 3870 }, { "epoch": 2.049669749009247, "grad_norm": 30.167470932006836, "learning_rate": 3.1642001409443274e-05, "loss": 3.9958, "step": 3880 }, { "epoch": 2.0549537648612946, "grad_norm": 11.583779335021973, "learning_rate": 3.146582100070472e-05, "loss": 4.598, "step": 3890 }, { "epoch": 2.0602377807133423, "grad_norm": 17.935171127319336, "learning_rate": 3.1289640591966176e-05, "loss": 3.5276, "step": 3900 }, { "epoch": 2.0655217965653896, "grad_norm": 16.461498260498047, "learning_rate": 3.111346018322763e-05, "loss": 3.2667, "step": 3910 }, { "epoch": 2.0708058124174373, "grad_norm": 20.244298934936523, "learning_rate": 
3.0937279774489084e-05, "loss": 3.7206, "step": 3920 }, { "epoch": 2.0760898282694846, "grad_norm": 28.7255802154541, "learning_rate": 3.076109936575053e-05, "loss": 3.0949, "step": 3930 }, { "epoch": 2.0813738441215324, "grad_norm": 24.063356399536133, "learning_rate": 3.0584918957011985e-05, "loss": 3.2762, "step": 3940 }, { "epoch": 2.08665785997358, "grad_norm": 39.801815032958984, "learning_rate": 3.0408738548273435e-05, "loss": 3.4928, "step": 3950 }, { "epoch": 2.0919418758256274, "grad_norm": 15.45219612121582, "learning_rate": 3.0232558139534883e-05, "loss": 2.9575, "step": 3960 }, { "epoch": 2.097225891677675, "grad_norm": 17.929317474365234, "learning_rate": 3.0056377730796337e-05, "loss": 3.1462, "step": 3970 }, { "epoch": 2.1025099075297224, "grad_norm": 55.91295623779297, "learning_rate": 2.988019732205779e-05, "loss": 3.9278, "step": 3980 }, { "epoch": 2.10779392338177, "grad_norm": 13.527727127075195, "learning_rate": 2.9704016913319238e-05, "loss": 3.7229, "step": 3990 }, { "epoch": 2.1130779392338175, "grad_norm": 23.85780143737793, "learning_rate": 2.9527836504580692e-05, "loss": 3.6077, "step": 4000 }, { "epoch": 2.1183619550858652, "grad_norm": 22.299245834350586, "learning_rate": 2.9351656095842146e-05, "loss": 3.2283, "step": 4010 }, { "epoch": 2.123645970937913, "grad_norm": 15.696680068969727, "learning_rate": 2.9175475687103593e-05, "loss": 3.4589, "step": 4020 }, { "epoch": 2.1289299867899603, "grad_norm": 11.234966278076172, "learning_rate": 2.8999295278365047e-05, "loss": 4.154, "step": 4030 }, { "epoch": 2.134214002642008, "grad_norm": 17.276336669921875, "learning_rate": 2.88231148696265e-05, "loss": 3.2606, "step": 4040 }, { "epoch": 2.1394980184940553, "grad_norm": 19.701416015625, "learning_rate": 2.864693446088795e-05, "loss": 3.3845, "step": 4050 }, { "epoch": 2.144782034346103, "grad_norm": 16.69789695739746, "learning_rate": 2.8470754052149402e-05, "loss": 3.2469, "step": 4060 }, { "epoch": 2.150066050198151, "grad_norm": 11.74082088470459, "learning_rate": 2.8294573643410853e-05, "loss": 3.369, "step": 4070 }, { "epoch": 2.155350066050198, "grad_norm": 10.2099027633667, "learning_rate": 2.8118393234672307e-05, "loss": 3.5608, "step": 4080 }, { "epoch": 2.160634081902246, "grad_norm": 28.42884635925293, "learning_rate": 2.7942212825933754e-05, "loss": 3.6139, "step": 4090 }, { "epoch": 2.165918097754293, "grad_norm": 20.37766456604004, "learning_rate": 2.7766032417195208e-05, "loss": 3.3964, "step": 4100 }, { "epoch": 2.171202113606341, "grad_norm": 30.010757446289062, "learning_rate": 2.7589852008456662e-05, "loss": 3.2963, "step": 4110 }, { "epoch": 2.1764861294583886, "grad_norm": 19.75163459777832, "learning_rate": 2.741367159971811e-05, "loss": 3.7318, "step": 4120 }, { "epoch": 2.181770145310436, "grad_norm": 22.214811325073242, "learning_rate": 2.7237491190979563e-05, "loss": 3.0523, "step": 4130 }, { "epoch": 2.1870541611624836, "grad_norm": 18.17839241027832, "learning_rate": 2.7061310782241017e-05, "loss": 3.7926, "step": 4140 }, { "epoch": 2.192338177014531, "grad_norm": 14.976846694946289, "learning_rate": 2.6885130373502464e-05, "loss": 3.1637, "step": 4150 }, { "epoch": 2.1976221928665787, "grad_norm": 23.86251449584961, "learning_rate": 2.670894996476392e-05, "loss": 3.5104, "step": 4160 }, { "epoch": 2.202906208718626, "grad_norm": 72.32622528076172, "learning_rate": 2.6532769556025372e-05, "loss": 3.8043, "step": 4170 }, { "epoch": 2.2081902245706737, "grad_norm": 23.094135284423828, "learning_rate": 2.6356589147286826e-05, "loss": 
3.4932, "step": 4180 }, { "epoch": 2.2134742404227215, "grad_norm": 32.60905838012695, "learning_rate": 2.6180408738548274e-05, "loss": 3.6579, "step": 4190 }, { "epoch": 2.2187582562747687, "grad_norm": 13.971908569335938, "learning_rate": 2.6004228329809728e-05, "loss": 3.1869, "step": 4200 }, { "epoch": 2.2240422721268165, "grad_norm": 13.490729331970215, "learning_rate": 2.582804792107118e-05, "loss": 3.2832, "step": 4210 }, { "epoch": 2.229326287978864, "grad_norm": 18.026714324951172, "learning_rate": 2.565186751233263e-05, "loss": 3.2434, "step": 4220 }, { "epoch": 2.2346103038309115, "grad_norm": 10.99934196472168, "learning_rate": 2.5475687103594083e-05, "loss": 3.3659, "step": 4230 }, { "epoch": 2.239894319682959, "grad_norm": 16.477264404296875, "learning_rate": 2.5299506694855533e-05, "loss": 3.51, "step": 4240 }, { "epoch": 2.2451783355350066, "grad_norm": 15.929734230041504, "learning_rate": 2.5123326286116984e-05, "loss": 3.3981, "step": 4250 }, { "epoch": 2.2504623513870543, "grad_norm": 25.625295639038086, "learning_rate": 2.4947145877378435e-05, "loss": 3.8105, "step": 4260 }, { "epoch": 2.2557463672391016, "grad_norm": 32.14104080200195, "learning_rate": 2.477096546863989e-05, "loss": 4.8187, "step": 4270 }, { "epoch": 2.2610303830911493, "grad_norm": 12.69516372680664, "learning_rate": 2.459478505990134e-05, "loss": 3.4329, "step": 4280 }, { "epoch": 2.2663143989431966, "grad_norm": 18.824922561645508, "learning_rate": 2.441860465116279e-05, "loss": 3.9682, "step": 4290 }, { "epoch": 2.2715984147952444, "grad_norm": 15.157118797302246, "learning_rate": 2.4242424242424244e-05, "loss": 4.2365, "step": 4300 }, { "epoch": 2.276882430647292, "grad_norm": 16.278324127197266, "learning_rate": 2.4066243833685695e-05, "loss": 3.3195, "step": 4310 }, { "epoch": 2.2821664464993394, "grad_norm": 14.254020690917969, "learning_rate": 2.389006342494715e-05, "loss": 3.5524, "step": 4320 }, { "epoch": 2.287450462351387, "grad_norm": 20.348011016845703, "learning_rate": 2.37138830162086e-05, "loss": 3.3471, "step": 4330 }, { "epoch": 2.2927344782034345, "grad_norm": 28.826488494873047, "learning_rate": 2.353770260747005e-05, "loss": 3.0519, "step": 4340 }, { "epoch": 2.298018494055482, "grad_norm": 22.089832305908203, "learning_rate": 2.3361522198731504e-05, "loss": 4.4239, "step": 4350 }, { "epoch": 2.30330250990753, "grad_norm": 23.556886672973633, "learning_rate": 2.3185341789992954e-05, "loss": 3.8301, "step": 4360 }, { "epoch": 2.3085865257595772, "grad_norm": 17.684389114379883, "learning_rate": 2.3009161381254405e-05, "loss": 3.4081, "step": 4370 }, { "epoch": 2.313870541611625, "grad_norm": 12.611895561218262, "learning_rate": 2.283298097251586e-05, "loss": 4.1883, "step": 4380 }, { "epoch": 2.3191545574636723, "grad_norm": 23.810239791870117, "learning_rate": 2.265680056377731e-05, "loss": 3.435, "step": 4390 }, { "epoch": 2.32443857331572, "grad_norm": 11.030603408813477, "learning_rate": 2.2480620155038764e-05, "loss": 3.2568, "step": 4400 }, { "epoch": 2.3297225891677673, "grad_norm": 27.612186431884766, "learning_rate": 2.2304439746300214e-05, "loss": 3.688, "step": 4410 }, { "epoch": 2.335006605019815, "grad_norm": 17.177663803100586, "learning_rate": 2.2128259337561665e-05, "loss": 3.3075, "step": 4420 }, { "epoch": 2.340290620871863, "grad_norm": 20.293621063232422, "learning_rate": 2.1952078928823115e-05, "loss": 2.914, "step": 4430 }, { "epoch": 2.34557463672391, "grad_norm": 10.931967735290527, "learning_rate": 2.1775898520084566e-05, "loss": 3.3027, "step": 4440 }, { 
"epoch": 2.350858652575958, "grad_norm": 20.574386596679688, "learning_rate": 2.159971811134602e-05, "loss": 3.0492, "step": 4450 }, { "epoch": 2.356142668428005, "grad_norm": 21.01512908935547, "learning_rate": 2.142353770260747e-05, "loss": 2.991, "step": 4460 }, { "epoch": 2.361426684280053, "grad_norm": 17.922767639160156, "learning_rate": 2.124735729386892e-05, "loss": 3.7925, "step": 4470 }, { "epoch": 2.3667107001321, "grad_norm": 13.694111824035645, "learning_rate": 2.1071176885130375e-05, "loss": 3.7547, "step": 4480 }, { "epoch": 2.371994715984148, "grad_norm": 14.418509483337402, "learning_rate": 2.0894996476391826e-05, "loss": 3.6175, "step": 4490 }, { "epoch": 2.3772787318361956, "grad_norm": 19.679662704467773, "learning_rate": 2.0718816067653276e-05, "loss": 3.5094, "step": 4500 }, { "epoch": 2.382562747688243, "grad_norm": 15.298222541809082, "learning_rate": 2.054263565891473e-05, "loss": 3.5767, "step": 4510 }, { "epoch": 2.3878467635402907, "grad_norm": 13.02542495727539, "learning_rate": 2.036645525017618e-05, "loss": 3.625, "step": 4520 }, { "epoch": 2.3931307793923384, "grad_norm": 26.76813507080078, "learning_rate": 2.0190274841437635e-05, "loss": 3.3456, "step": 4530 }, { "epoch": 2.3984147952443857, "grad_norm": 27.34304428100586, "learning_rate": 2.0014094432699086e-05, "loss": 3.4719, "step": 4540 }, { "epoch": 2.4036988110964335, "grad_norm": 20.406082153320312, "learning_rate": 1.9837914023960536e-05, "loss": 3.586, "step": 4550 }, { "epoch": 2.4089828269484808, "grad_norm": 16.045312881469727, "learning_rate": 1.966173361522199e-05, "loss": 3.2718, "step": 4560 }, { "epoch": 2.4142668428005285, "grad_norm": 23.47331428527832, "learning_rate": 1.948555320648344e-05, "loss": 3.8998, "step": 4570 }, { "epoch": 2.419550858652576, "grad_norm": 11.203316688537598, "learning_rate": 1.930937279774489e-05, "loss": 3.492, "step": 4580 }, { "epoch": 2.4248348745046235, "grad_norm": 12.664722442626953, "learning_rate": 1.9133192389006342e-05, "loss": 3.1292, "step": 4590 }, { "epoch": 2.4301188903566713, "grad_norm": 9.169426918029785, "learning_rate": 1.8957011980267793e-05, "loss": 3.4731, "step": 4600 }, { "epoch": 2.4354029062087186, "grad_norm": 17.66658592224121, "learning_rate": 1.8780831571529247e-05, "loss": 3.7711, "step": 4610 }, { "epoch": 2.4406869220607663, "grad_norm": 13.179720878601074, "learning_rate": 1.8604651162790697e-05, "loss": 2.9846, "step": 4620 }, { "epoch": 2.4459709379128136, "grad_norm": 17.785696029663086, "learning_rate": 1.8428470754052148e-05, "loss": 3.4984, "step": 4630 }, { "epoch": 2.4512549537648614, "grad_norm": 22.99701690673828, "learning_rate": 1.8252290345313602e-05, "loss": 3.1882, "step": 4640 }, { "epoch": 2.4565389696169087, "grad_norm": 17.009098052978516, "learning_rate": 1.8076109936575052e-05, "loss": 3.3042, "step": 4650 }, { "epoch": 2.4618229854689564, "grad_norm": 16.592060089111328, "learning_rate": 1.7899929527836506e-05, "loss": 3.5604, "step": 4660 }, { "epoch": 2.467107001321004, "grad_norm": 19.692729949951172, "learning_rate": 1.7723749119097957e-05, "loss": 3.2379, "step": 4670 }, { "epoch": 2.4723910171730514, "grad_norm": 13.656821250915527, "learning_rate": 1.7547568710359408e-05, "loss": 2.9746, "step": 4680 }, { "epoch": 2.477675033025099, "grad_norm": 14.264239311218262, "learning_rate": 1.737138830162086e-05, "loss": 3.817, "step": 4690 }, { "epoch": 2.4829590488771465, "grad_norm": 16.037349700927734, "learning_rate": 1.7195207892882312e-05, "loss": 3.5153, "step": 4700 }, { "epoch": 
2.488243064729194, "grad_norm": 20.12216567993164, "learning_rate": 1.7019027484143766e-05, "loss": 3.3744, "step": 4710 }, { "epoch": 2.4935270805812415, "grad_norm": 15.195042610168457, "learning_rate": 1.6842847075405217e-05, "loss": 3.42, "step": 4720 }, { "epoch": 2.4988110964332892, "grad_norm": 28.55310821533203, "learning_rate": 1.6666666666666667e-05, "loss": 3.6766, "step": 4730 }, { "epoch": 2.504095112285337, "grad_norm": 18.743507385253906, "learning_rate": 1.649048625792812e-05, "loss": 3.6339, "step": 4740 }, { "epoch": 2.5093791281373843, "grad_norm": 27.498262405395508, "learning_rate": 1.6314305849189572e-05, "loss": 3.3479, "step": 4750 }, { "epoch": 2.514663143989432, "grad_norm": 17.464143753051758, "learning_rate": 1.6138125440451023e-05, "loss": 3.6936, "step": 4760 }, { "epoch": 2.5199471598414798, "grad_norm": 18.474689483642578, "learning_rate": 1.5961945031712473e-05, "loss": 3.375, "step": 4770 }, { "epoch": 2.525231175693527, "grad_norm": 16.313709259033203, "learning_rate": 1.5785764622973924e-05, "loss": 3.8723, "step": 4780 }, { "epoch": 2.5305151915455744, "grad_norm": 16.55835723876953, "learning_rate": 1.5609584214235378e-05, "loss": 3.3616, "step": 4790 }, { "epoch": 2.535799207397622, "grad_norm": 14.966315269470215, "learning_rate": 1.543340380549683e-05, "loss": 3.6797, "step": 4800 }, { "epoch": 2.54108322324967, "grad_norm": 13.11168098449707, "learning_rate": 1.525722339675828e-05, "loss": 3.601, "step": 4810 }, { "epoch": 2.546367239101717, "grad_norm": 42.751434326171875, "learning_rate": 1.5081042988019733e-05, "loss": 3.6341, "step": 4820 }, { "epoch": 2.551651254953765, "grad_norm": 19.76482582092285, "learning_rate": 1.4904862579281184e-05, "loss": 3.471, "step": 4830 }, { "epoch": 2.5569352708058126, "grad_norm": 18.304105758666992, "learning_rate": 1.4728682170542638e-05, "loss": 3.5806, "step": 4840 }, { "epoch": 2.56221928665786, "grad_norm": 27.958696365356445, "learning_rate": 1.4552501761804088e-05, "loss": 3.1213, "step": 4850 }, { "epoch": 2.5675033025099077, "grad_norm": 17.248594284057617, "learning_rate": 1.4376321353065539e-05, "loss": 3.3494, "step": 4860 }, { "epoch": 2.572787318361955, "grad_norm": 13.064223289489746, "learning_rate": 1.4200140944326993e-05, "loss": 3.2463, "step": 4870 }, { "epoch": 2.5780713342140027, "grad_norm": 13.665904998779297, "learning_rate": 1.4023960535588444e-05, "loss": 3.3252, "step": 4880 }, { "epoch": 2.58335535006605, "grad_norm": 19.32305145263672, "learning_rate": 1.3847780126849896e-05, "loss": 2.8653, "step": 4890 }, { "epoch": 2.5886393659180977, "grad_norm": 16.994230270385742, "learning_rate": 1.3671599718111346e-05, "loss": 2.9332, "step": 4900 }, { "epoch": 2.5939233817701455, "grad_norm": 27.408260345458984, "learning_rate": 1.3495419309372797e-05, "loss": 3.2594, "step": 4910 }, { "epoch": 2.5992073976221928, "grad_norm": 13.718049049377441, "learning_rate": 1.3319238900634251e-05, "loss": 3.058, "step": 4920 }, { "epoch": 2.6044914134742405, "grad_norm": 22.911998748779297, "learning_rate": 1.3143058491895702e-05, "loss": 3.2382, "step": 4930 }, { "epoch": 2.6097754293262883, "grad_norm": 20.565528869628906, "learning_rate": 1.2966878083157152e-05, "loss": 3.2209, "step": 4940 }, { "epoch": 2.6150594451783356, "grad_norm": 18.180601119995117, "learning_rate": 1.2790697674418606e-05, "loss": 3.7973, "step": 4950 }, { "epoch": 2.620343461030383, "grad_norm": 14.831124305725098, "learning_rate": 1.2614517265680057e-05, "loss": 3.8681, "step": 4960 }, { "epoch": 2.6256274768824306, 
"grad_norm": 24.32288360595703, "learning_rate": 1.243833685694151e-05, "loss": 3.3676, "step": 4970 }, { "epoch": 2.6309114927344783, "grad_norm": 18.79412841796875, "learning_rate": 1.226215644820296e-05, "loss": 4.0666, "step": 4980 }, { "epoch": 2.6361955085865256, "grad_norm": 18.881690979003906, "learning_rate": 1.2085976039464412e-05, "loss": 3.7928, "step": 4990 }, { "epoch": 2.6414795244385734, "grad_norm": 27.418704986572266, "learning_rate": 1.1909795630725863e-05, "loss": 3.1532, "step": 5000 }, { "epoch": 2.646763540290621, "grad_norm": 19.96307945251465, "learning_rate": 1.1733615221987315e-05, "loss": 3.7501, "step": 5010 }, { "epoch": 2.6520475561426684, "grad_norm": 19.64175033569336, "learning_rate": 1.1557434813248767e-05, "loss": 3.4919, "step": 5020 }, { "epoch": 2.657331571994716, "grad_norm": 31.352962493896484, "learning_rate": 1.138125440451022e-05, "loss": 3.3118, "step": 5030 }, { "epoch": 2.6626155878467634, "grad_norm": 23.362979888916016, "learning_rate": 1.1205073995771672e-05, "loss": 3.6286, "step": 5040 }, { "epoch": 2.667899603698811, "grad_norm": 13.916152954101562, "learning_rate": 1.1028893587033122e-05, "loss": 3.7259, "step": 5050 }, { "epoch": 2.6731836195508585, "grad_norm": 18.22846794128418, "learning_rate": 1.0852713178294575e-05, "loss": 3.3308, "step": 5060 }, { "epoch": 2.678467635402906, "grad_norm": 13.093436241149902, "learning_rate": 1.0676532769556025e-05, "loss": 3.0981, "step": 5070 }, { "epoch": 2.683751651254954, "grad_norm": 26.124799728393555, "learning_rate": 1.0500352360817478e-05, "loss": 3.5098, "step": 5080 }, { "epoch": 2.6890356671070013, "grad_norm": 21.927812576293945, "learning_rate": 1.0324171952078928e-05, "loss": 3.3641, "step": 5090 }, { "epoch": 2.694319682959049, "grad_norm": 15.887828826904297, "learning_rate": 1.014799154334038e-05, "loss": 3.7715, "step": 5100 }, { "epoch": 2.6996036988110963, "grad_norm": 28.31195068359375, "learning_rate": 9.971811134601833e-06, "loss": 3.5266, "step": 5110 }, { "epoch": 2.704887714663144, "grad_norm": 19.04535675048828, "learning_rate": 9.795630725863285e-06, "loss": 3.8404, "step": 5120 }, { "epoch": 2.7101717305151913, "grad_norm": 22.21696662902832, "learning_rate": 9.619450317124738e-06, "loss": 3.6037, "step": 5130 }, { "epoch": 2.715455746367239, "grad_norm": 15.042706489562988, "learning_rate": 9.443269908386188e-06, "loss": 3.6643, "step": 5140 }, { "epoch": 2.720739762219287, "grad_norm": 17.73995590209961, "learning_rate": 9.267089499647639e-06, "loss": 3.5092, "step": 5150 }, { "epoch": 2.726023778071334, "grad_norm": 32.415443420410156, "learning_rate": 9.090909090909091e-06, "loss": 3.9008, "step": 5160 }, { "epoch": 2.731307793923382, "grad_norm": 22.608060836791992, "learning_rate": 8.914728682170543e-06, "loss": 3.113, "step": 5170 }, { "epoch": 2.7365918097754296, "grad_norm": 21.9082088470459, "learning_rate": 8.738548273431994e-06, "loss": 3.3552, "step": 5180 }, { "epoch": 2.741875825627477, "grad_norm": 22.067411422729492, "learning_rate": 8.562367864693446e-06, "loss": 3.3768, "step": 5190 }, { "epoch": 2.747159841479524, "grad_norm": 17.794010162353516, "learning_rate": 8.386187455954899e-06, "loss": 3.1427, "step": 5200 }, { "epoch": 2.752443857331572, "grad_norm": 16.173418045043945, "learning_rate": 8.210007047216351e-06, "loss": 2.9815, "step": 5210 }, { "epoch": 2.7577278731836197, "grad_norm": 12.781997680664062, "learning_rate": 8.033826638477801e-06, "loss": 3.3121, "step": 5220 }, { "epoch": 2.763011889035667, "grad_norm": 12.390795707702637, 
"learning_rate": 7.857646229739254e-06, "loss": 3.2196, "step": 5230 }, { "epoch": 2.7682959048877147, "grad_norm": 15.312797546386719, "learning_rate": 7.681465821000704e-06, "loss": 2.9771, "step": 5240 }, { "epoch": 2.7735799207397624, "grad_norm": 22.249895095825195, "learning_rate": 7.5052854122621575e-06, "loss": 3.4433, "step": 5250 }, { "epoch": 2.7788639365918097, "grad_norm": 25.02825355529785, "learning_rate": 7.329105003523609e-06, "loss": 3.2673, "step": 5260 }, { "epoch": 2.7841479524438575, "grad_norm": 12.671621322631836, "learning_rate": 7.15292459478506e-06, "loss": 3.3033, "step": 5270 }, { "epoch": 2.789431968295905, "grad_norm": 16.456417083740234, "learning_rate": 6.976744186046512e-06, "loss": 3.3805, "step": 5280 }, { "epoch": 2.7947159841479525, "grad_norm": 15.440134048461914, "learning_rate": 6.800563777307964e-06, "loss": 2.863, "step": 5290 }, { "epoch": 2.8, "grad_norm": 14.742483139038086, "learning_rate": 6.624383368569416e-06, "loss": 3.5084, "step": 5300 }, { "epoch": 2.8052840158520476, "grad_norm": 15.058581352233887, "learning_rate": 6.448202959830866e-06, "loss": 3.3316, "step": 5310 }, { "epoch": 2.8105680317040953, "grad_norm": 18.808738708496094, "learning_rate": 6.2720225510923186e-06, "loss": 2.7818, "step": 5320 }, { "epoch": 2.8158520475561426, "grad_norm": 21.67080307006836, "learning_rate": 6.095842142353771e-06, "loss": 3.0287, "step": 5330 }, { "epoch": 2.8211360634081903, "grad_norm": 33.148887634277344, "learning_rate": 5.919661733615222e-06, "loss": 3.4332, "step": 5340 }, { "epoch": 2.8264200792602376, "grad_norm": 15.873844146728516, "learning_rate": 5.743481324876674e-06, "loss": 3.6903, "step": 5350 }, { "epoch": 2.8317040951122854, "grad_norm": 10.232421875, "learning_rate": 5.567300916138125e-06, "loss": 3.1569, "step": 5360 }, { "epoch": 2.8369881109643327, "grad_norm": 18.792686462402344, "learning_rate": 5.3911205073995775e-06, "loss": 3.5471, "step": 5370 }, { "epoch": 2.8422721268163804, "grad_norm": 26.9221248626709, "learning_rate": 5.21494009866103e-06, "loss": 2.7597, "step": 5380 }, { "epoch": 2.847556142668428, "grad_norm": 17.804426193237305, "learning_rate": 5.0387596899224804e-06, "loss": 3.3494, "step": 5390 }, { "epoch": 2.8528401585204755, "grad_norm": 10.929807662963867, "learning_rate": 4.862579281183933e-06, "loss": 3.9842, "step": 5400 }, { "epoch": 2.858124174372523, "grad_norm": 15.657679557800293, "learning_rate": 4.686398872445384e-06, "loss": 3.2531, "step": 5410 }, { "epoch": 2.863408190224571, "grad_norm": 33.025203704833984, "learning_rate": 4.5102184637068365e-06, "loss": 3.3169, "step": 5420 }, { "epoch": 2.8686922060766182, "grad_norm": 26.05933380126953, "learning_rate": 4.334038054968288e-06, "loss": 3.3312, "step": 5430 }, { "epoch": 2.8739762219286655, "grad_norm": 30.90452766418457, "learning_rate": 4.157857646229739e-06, "loss": 3.3305, "step": 5440 }, { "epoch": 2.8792602377807133, "grad_norm": 12.377135276794434, "learning_rate": 3.981677237491191e-06, "loss": 2.9052, "step": 5450 }, { "epoch": 2.884544253632761, "grad_norm": 19.9295597076416, "learning_rate": 3.805496828752643e-06, "loss": 3.0459, "step": 5460 }, { "epoch": 2.8898282694848083, "grad_norm": 13.751632690429688, "learning_rate": 3.629316420014094e-06, "loss": 3.5149, "step": 5470 }, { "epoch": 2.895112285336856, "grad_norm": 52.6490364074707, "learning_rate": 3.4531360112755465e-06, "loss": 4.0335, "step": 5480 }, { "epoch": 2.900396301188904, "grad_norm": 20.521381378173828, "learning_rate": 3.2769556025369984e-06, "loss": 
3.5587, "step": 5490 }, { "epoch": 2.905680317040951, "grad_norm": 14.23992919921875, "learning_rate": 3.10077519379845e-06, "loss": 3.4677, "step": 5500 }, { "epoch": 2.910964332892999, "grad_norm": 22.308265686035156, "learning_rate": 2.9245947850599013e-06, "loss": 3.1976, "step": 5510 }, { "epoch": 2.916248348745046, "grad_norm": 13.019217491149902, "learning_rate": 2.748414376321353e-06, "loss": 3.0215, "step": 5520 }, { "epoch": 2.921532364597094, "grad_norm": 18.27345848083496, "learning_rate": 2.5722339675828046e-06, "loss": 3.728, "step": 5530 }, { "epoch": 2.926816380449141, "grad_norm": 26.253225326538086, "learning_rate": 2.3960535588442565e-06, "loss": 3.3871, "step": 5540 }, { "epoch": 2.932100396301189, "grad_norm": 23.498109817504883, "learning_rate": 2.2198731501057084e-06, "loss": 3.503, "step": 5550 }, { "epoch": 2.9373844121532366, "grad_norm": 15.002182006835938, "learning_rate": 2.0436927413671602e-06, "loss": 3.8384, "step": 5560 }, { "epoch": 2.942668428005284, "grad_norm": 18.664382934570312, "learning_rate": 1.867512332628612e-06, "loss": 3.4036, "step": 5570 }, { "epoch": 2.9479524438573317, "grad_norm": 18.6015682220459, "learning_rate": 1.6913319238900636e-06, "loss": 3.234, "step": 5580 }, { "epoch": 2.953236459709379, "grad_norm": 17.370180130004883, "learning_rate": 1.5151515151515152e-06, "loss": 3.4581, "step": 5590 }, { "epoch": 2.9585204755614267, "grad_norm": 13.149053573608398, "learning_rate": 1.338971106412967e-06, "loss": 3.2361, "step": 5600 }, { "epoch": 2.963804491413474, "grad_norm": 27.13419532775879, "learning_rate": 1.1627906976744186e-06, "loss": 3.5342, "step": 5610 }, { "epoch": 2.9690885072655218, "grad_norm": 19.81624412536621, "learning_rate": 9.866102889358702e-07, "loss": 3.0217, "step": 5620 }, { "epoch": 2.9743725231175695, "grad_norm": 19.494544982910156, "learning_rate": 8.10429880197322e-07, "loss": 2.8814, "step": 5630 }, { "epoch": 2.979656538969617, "grad_norm": 27.98061752319336, "learning_rate": 6.342494714587738e-07, "loss": 3.8175, "step": 5640 }, { "epoch": 2.9849405548216645, "grad_norm": 25.441978454589844, "learning_rate": 4.5806906272022556e-07, "loss": 3.3658, "step": 5650 }, { "epoch": 2.9902245706737123, "grad_norm": 11.818696975708008, "learning_rate": 2.818886539816773e-07, "loss": 3.4707, "step": 5660 }, { "epoch": 2.9955085865257596, "grad_norm": 19.221172332763672, "learning_rate": 1.0570824524312897e-07, "loss": 3.5125, "step": 5670 } ], "logging_steps": 10, "max_steps": 5676, "num_input_tokens_seen": 0, "num_train_epochs": 3, "save_steps": 50, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 1.1819608209260544e+16, "train_batch_size": 2, "trial_name": null, "trial_params": null }