|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 119,
  "global_step": 1425,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 0.434104859828949, "learning_rate": 2e-05, "loss": 0.8629, "step": 1},
    {"epoch": 0.0, "eval_loss": 0.9369699358940125, "eval_runtime": 61.2717, "eval_samples_per_second": 1.632, "eval_steps_per_second": 1.632, "step": 1},
    {"epoch": 0.0, "grad_norm": 0.517804741859436, "learning_rate": 4e-05, "loss": 0.9794, "step": 2},
    {"epoch": 0.01, "grad_norm": 1.3174158334732056, "learning_rate": 6e-05, "loss": 1.5473, "step": 3},
    {"epoch": 0.01, "grad_norm": 0.6236504912376404, "learning_rate": 8e-05, "loss": 0.6592, "step": 4},
    {"epoch": 0.01, "grad_norm": 0.49080607295036316, "learning_rate": 0.0001, "loss": 1.1104, "step": 5},
    {"epoch": 0.01, "grad_norm": 0.43938764929771423, "learning_rate": 0.00012, "loss": 0.9446, "step": 6},
    {"epoch": 0.01, "grad_norm": 0.8905380368232727, "learning_rate": 0.00014, "loss": 0.6719, "step": 7},
    {"epoch": 0.02, "grad_norm": 0.9044575095176697, "learning_rate": 0.00016, "loss": 0.6471, "step": 8},
    {"epoch": 0.02, "grad_norm": 0.541214108467102, "learning_rate": 0.00018, "loss": 0.8339, "step": 9},
    {"epoch": 0.02, "grad_norm": 1.075484275817871, "learning_rate": 0.0002, "loss": 0.9616, "step": 10},
    {"epoch": 0.02, "grad_norm": 0.3413633406162262, "learning_rate": 0.0001999998618515421, "loss": 0.8251, "step": 11},
    {"epoch": 0.03, "grad_norm": 0.7642049193382263, "learning_rate": 0.00019999944740655014, "loss": 1.3883, "step": 12},
    {"epoch": 0.03, "grad_norm": 0.5759286284446716, "learning_rate": 0.00019999875666616918, "loss": 0.9162, "step": 13},
    {"epoch": 0.03, "grad_norm": 1.16646409034729, "learning_rate": 0.00019999778963230775, "loss": 1.1735, "step": 14},
    {"epoch": 0.03, "grad_norm": 0.5447037220001221, "learning_rate": 0.0001999965463076377, "loss": 1.2769, "step": 15},
    {"epoch": 0.03, "grad_norm": 0.510498046875, "learning_rate": 0.00019999502669559432, "loss": 1.1439, "step": 16},
    {"epoch": 0.04, "grad_norm": 0.514502227306366, "learning_rate": 0.00019999323080037624, "loss": 0.9929, "step": 17},
    {"epoch": 0.04, "grad_norm": 0.2014903873205185, "learning_rate": 0.00019999115862694546, "loss": 0.6706, "step": 18},
    {"epoch": 0.04, "grad_norm": 0.6941856741905212, "learning_rate": 0.00019998881018102737, "loss": 0.7576, "step": 19},
    {"epoch": 0.04, "grad_norm": 0.29424500465393066, "learning_rate": 0.00019998618546911056, "loss": 1.5048, "step": 20},
    {"epoch": 0.04, "grad_norm": 1.431168556213379, "learning_rate": 0.00019998328449844714, "loss": 0.761, "step": 21},
    {"epoch": 0.05, "grad_norm": 2.672117233276367, "learning_rate": 0.00019998010727705236, "loss": 0.9434, "step": 22},
    {"epoch": 0.05, "grad_norm": 0.40351876616477966, "learning_rate": 0.00019997665381370477, "loss": 1.2708, "step": 23},
    {"epoch": 0.05, "grad_norm": 0.36313024163246155, "learning_rate": 0.00019997292411794618, "loss": 0.6931, "step": 24},
    {"epoch": 0.05, "grad_norm": 0.7412884831428528, "learning_rate": 0.00019996891820008164, "loss": 0.827, "step": 25},
    {"epoch": 0.05, "grad_norm": 0.7299063205718994, "learning_rate": 0.00019996463607117935, "loss": 0.7135, "step": 26},
    {"epoch": 0.06, "grad_norm": 0.5188469290733337, "learning_rate": 0.00019996007774307075, "loss": 0.574, "step": 27},
    {"epoch": 0.06, "grad_norm": 0.23084047436714172, "learning_rate": 0.00019995524322835034, "loss": 1.0736, "step": 28},
    {"epoch": 0.06, "grad_norm": 0.7209138870239258, "learning_rate": 0.00019995013254037574, "loss": 0.9087, "step": 29},
    {"epoch": 0.06, "grad_norm": 1.2266876697540283, "learning_rate": 0.00019994474569326757, "loss": 1.2464, "step": 30},
    {"epoch": 0.07, "grad_norm": 0.6613232493400574, "learning_rate": 0.0001999390827019096, "loss": 0.9568, "step": 31},
    {"epoch": 0.07, "grad_norm": 1.0048911571502686, "learning_rate": 0.00019993314358194843, "loss": 1.1935, "step": 32},
    {"epoch": 0.07, "grad_norm": 0.9797032475471497, "learning_rate": 0.00019992692834979372, "loss": 1.1969, "step": 33},
    {"epoch": 0.07, "grad_norm": 0.4784688353538513, "learning_rate": 0.00019992043702261793, "loss": 0.8674, "step": 34},
    {"epoch": 0.07, "grad_norm": 2.863600015640259, "learning_rate": 0.00019991366961835642, "loss": 1.3276, "step": 35},
    {"epoch": 0.08, "grad_norm": 0.4809297025203705, "learning_rate": 0.0001999066261557073, "loss": 1.0116, "step": 36},
    {"epoch": 0.08, "grad_norm": 0.6229730844497681, "learning_rate": 0.00019989930665413147, "loss": 1.2794, "step": 37},
    {"epoch": 0.08, "grad_norm": 0.5807106494903564, "learning_rate": 0.0001998917111338525, "loss": 1.0074, "step": 38},
    {"epoch": 0.08, "grad_norm": 1.5811257362365723, "learning_rate": 0.00019988383961585645, "loss": 1.2491, "step": 39},
    {"epoch": 0.08, "grad_norm": 0.46693727374076843, "learning_rate": 0.00019987569212189224, "loss": 0.8432, "step": 40},
    {"epoch": 0.09, "grad_norm": 0.9792713522911072, "learning_rate": 0.00019986726867447107, "loss": 0.8716, "step": 41},
    {"epoch": 0.09, "grad_norm": 0.3240562081336975, "learning_rate": 0.00019985856929686667, "loss": 0.9934, "step": 42},
    {"epoch": 0.09, "grad_norm": 0.7358172535896301, "learning_rate": 0.0001998495940131152, "loss": 0.7247, "step": 43},
    {"epoch": 0.09, "grad_norm": 0.40497535467147827, "learning_rate": 0.00019984034284801502, "loss": 0.8262, "step": 44},
    {"epoch": 0.09, "grad_norm": 0.36835265159606934, "learning_rate": 0.00019983081582712685, "loss": 1.0898, "step": 45},
    {"epoch": 0.1, "grad_norm": 0.7027626633644104, "learning_rate": 0.0001998210129767735, "loss": 1.2949, "step": 46},
    {"epoch": 0.1, "grad_norm": 0.35624387860298157, "learning_rate": 0.00019981093432404006, "loss": 0.9734, "step": 47},
    {"epoch": 0.1, "grad_norm": 0.4896808862686157, "learning_rate": 0.00019980057989677345, "loss": 1.2297, "step": 48},
    {"epoch": 0.1, "grad_norm": 0.9656649827957153, "learning_rate": 0.00019978994972358265, "loss": 0.8358, "step": 49},
    {"epoch": 0.11, "grad_norm": 0.644644021987915, "learning_rate": 0.0001997790438338385, "loss": 0.7373, "step": 50},
    {"epoch": 0.11, "grad_norm": 1.0634011030197144, "learning_rate": 0.00019976786225767365, "loss": 1.3792, "step": 51},
    {"epoch": 0.11, "grad_norm": 0.9041422605514526, "learning_rate": 0.00019975640502598244, "loss": 0.856, "step": 52},
    {"epoch": 0.11, "grad_norm": 0.5682844519615173, "learning_rate": 0.00019974467217042085, "loss": 0.6686, "step": 53},
    {"epoch": 0.11, "grad_norm": 0.7197520136833191, "learning_rate": 0.00019973266372340639, "loss": 0.6806, "step": 54},
    {"epoch": 0.12, "grad_norm": 1.2542258501052856, "learning_rate": 0.00019972037971811802, "loss": 1.353, "step": 55},
    {"epoch": 0.12, "grad_norm": 0.6802650094032288, "learning_rate": 0.0001997078201884961, "loss": 1.0014, "step": 56},
    {"epoch": 0.12, "grad_norm": 0.4886693060398102, "learning_rate": 0.0001996949851692422, "loss": 0.8613, "step": 57},
    {"epoch": 0.12, "grad_norm": 1.0226856470108032, "learning_rate": 0.0001996818746958191, "loss": 1.0443, "step": 58},
    {"epoch": 0.12, "grad_norm": 0.5514834523200989, "learning_rate": 0.00019966848880445062, "loss": 0.8816, "step": 59},
    {"epoch": 0.13, "grad_norm": 0.4890052378177643, "learning_rate": 0.00019965482753212156, "loss": 0.8541, "step": 60},
    {"epoch": 0.13, "grad_norm": 0.9011398553848267, "learning_rate": 0.0001996408909165776, "loss": 1.1158, "step": 61},
    {"epoch": 0.13, "grad_norm": 0.7809276580810547, "learning_rate": 0.00019962667899632518, "loss": 0.6702, "step": 62},
    {"epoch": 0.13, "grad_norm": 0.604097843170166, "learning_rate": 0.00019961219181063142, "loss": 1.1875, "step": 63},
    {"epoch": 0.13, "grad_norm": 0.9003333449363708, "learning_rate": 0.00019959742939952392, "loss": 1.2126, "step": 64},
    {"epoch": 0.14, "grad_norm": 0.59239661693573, "learning_rate": 0.0001995823918037908, "loss": 1.1083, "step": 65},
    {"epoch": 0.14, "grad_norm": 0.6981655955314636, "learning_rate": 0.00019956707906498044, "loss": 0.9634, "step": 66},
    {"epoch": 0.14, "grad_norm": 0.4635857045650482, "learning_rate": 0.00019955149122540152, "loss": 1.0345, "step": 67},
    {"epoch": 0.14, "grad_norm": 0.35098958015441895, "learning_rate": 0.00019953562832812272, "loss": 0.8871, "step": 68},
    {"epoch": 0.15, "grad_norm": 0.4510989189147949, "learning_rate": 0.00019951949041697274, "loss": 0.6967, "step": 69},
    {"epoch": 0.15, "grad_norm": 0.39661434292793274, "learning_rate": 0.00019950307753654017, "loss": 1.0786, "step": 70},
    {"epoch": 0.15, "grad_norm": 0.5836047530174255, "learning_rate": 0.00019948638973217323, "loss": 0.9265, "step": 71},
    {"epoch": 0.15, "grad_norm": 0.4589115381240845, "learning_rate": 0.00019946942704997982, "loss": 0.6476, "step": 72},
    {"epoch": 0.15, "grad_norm": 0.4495834410190582, "learning_rate": 0.00019945218953682734, "loss": 0.8693, "step": 73},
    {"epoch": 0.16, "grad_norm": 0.35905730724334717, "learning_rate": 0.00019943467724034252, "loss": 1.0325, "step": 74},
    {"epoch": 0.16, "grad_norm": 2.235016345977783, "learning_rate": 0.0001994168902089112, "loss": 1.3103, "step": 75},
    {"epoch": 0.16, "grad_norm": 0.36725524067878723, "learning_rate": 0.00019939882849167852, "loss": 0.908, "step": 76},
    {"epoch": 0.16, "grad_norm": 0.66635662317276, "learning_rate": 0.0001993804921385484, "loss": 0.682, "step": 77},
    {"epoch": 0.16, "grad_norm": 2.121004819869995, "learning_rate": 0.0001993618812001836, "loss": 0.8462, "step": 78},
    {"epoch": 0.17, "grad_norm": 0.44895172119140625, "learning_rate": 0.00019934299572800556, "loss": 0.9625, "step": 79},
    {"epoch": 0.17, "grad_norm": 0.5769445300102234, "learning_rate": 0.00019932383577419432, "loss": 0.7278, "step": 80},
    {"epoch": 0.17, "grad_norm": 0.3807710111141205, "learning_rate": 0.00019930440139168817, "loss": 0.657, "step": 81},
    {"epoch": 0.17, "grad_norm": 0.212838813662529, "learning_rate": 0.00019928469263418374, "loss": 0.3094, "step": 82},
    {"epoch": 0.17, "grad_norm": 0.8039274215698242, "learning_rate": 0.0001992647095561357, "loss": 0.8898, "step": 83},
    {"epoch": 0.18, "grad_norm": 0.7184070348739624, "learning_rate": 0.00019924445221275675, "loss": 0.8613, "step": 84},
    {"epoch": 0.18, "grad_norm": 0.4697589874267578, "learning_rate": 0.00019922392066001722, "loss": 0.9533, "step": 85},
    {"epoch": 0.18, "grad_norm": 0.7024903297424316, "learning_rate": 0.00019920311495464518, "loss": 0.7188, "step": 86},
    {"epoch": 0.18, "grad_norm": 0.5008754730224609, "learning_rate": 0.00019918203515412617, "loss": 0.8329, "step": 87},
    {"epoch": 0.19, "grad_norm": 0.6406499743461609, "learning_rate": 0.00019916068131670302, "loss": 1.4259, "step": 88},
    {"epoch": 0.19, "grad_norm": 0.39489614963531494, "learning_rate": 0.00019913905350137573, "loss": 0.5831, "step": 89},
    {"epoch": 0.19, "grad_norm": 0.7018424272537231, "learning_rate": 0.0001991171517679013, "loss": 0.9808, "step": 90},
    {"epoch": 0.19, "grad_norm": 0.5011338591575623, "learning_rate": 0.00019909497617679348, "loss": 0.6806, "step": 91},
    {"epoch": 0.19, "grad_norm": 0.2412053793668747, "learning_rate": 0.0001990725267893228, "loss": 1.0299, "step": 92},
    {"epoch": 0.2, "grad_norm": 0.4145759046077728, "learning_rate": 0.00019904980366751624, "loss": 1.4344, "step": 93},
    {"epoch": 0.2, "grad_norm": 0.5789082050323486, "learning_rate": 0.00019902680687415705, "loss": 0.4662, "step": 94},
    {"epoch": 0.2, "grad_norm": 0.28986847400665283, "learning_rate": 0.00019900353647278466, "loss": 1.296, "step": 95},
    {"epoch": 0.2, "grad_norm": 0.33722007274627686, "learning_rate": 0.00019897999252769448, "loss": 0.8011, "step": 96},
    {"epoch": 0.2, "grad_norm": 0.6796722412109375, "learning_rate": 0.00019895617510393772, "loss": 0.972, "step": 97},
    {"epoch": 0.21, "grad_norm": 0.5858548879623413, "learning_rate": 0.00019893208426732115, "loss": 1.0073, "step": 98},
    {"epoch": 0.21, "grad_norm": 0.5766484141349792, "learning_rate": 0.00019890772008440704, "loss": 0.7884, "step": 99},
    {"epoch": 0.21, "grad_norm": 0.37647876143455505, "learning_rate": 0.00019888308262251285, "loss": 0.6407, "step": 100},
    {"epoch": 0.21, "grad_norm": 1.5475112199783325, "learning_rate": 0.00019885817194971117, "loss": 1.1401, "step": 101},
    {"epoch": 0.21, "grad_norm": 1.1082801818847656, "learning_rate": 0.00019883298813482938, "loss": 1.392, "step": 102},
    {"epoch": 0.22, "grad_norm": 0.3952051103115082, "learning_rate": 0.00019880753124744963, "loss": 1.0498, "step": 103},
    {"epoch": 0.22, "grad_norm": 0.19289755821228027, "learning_rate": 0.00019878180135790845, "loss": 0.4145, "step": 104},
    {"epoch": 0.22, "grad_norm": 0.5658400654792786, "learning_rate": 0.00019875579853729676, "loss": 1.0984, "step": 105},
    {"epoch": 0.22, "grad_norm": 0.8976437449455261, "learning_rate": 0.00019872952285745959, "loss": 0.6919, "step": 106},
    {"epoch": 0.23, "grad_norm": 0.5265024900436401, "learning_rate": 0.00019870297439099577, "loss": 1.2932, "step": 107},
    {"epoch": 0.23, "grad_norm": 0.8367021083831787, "learning_rate": 0.00019867615321125795, "loss": 1.4497, "step": 108},
    {"epoch": 0.23, "grad_norm": 0.5223955512046814, "learning_rate": 0.00019864905939235214, "loss": 1.0325, "step": 109},
    {"epoch": 0.23, "grad_norm": 1.408779501914978, "learning_rate": 0.00019862169300913785, "loss": 0.9026, "step": 110},
    {"epoch": 0.23, "grad_norm": 0.24817530810832977, "learning_rate": 0.00019859405413722746, "loss": 0.826, "step": 111},
    {"epoch": 0.24, "grad_norm": 1.450430154800415, "learning_rate": 0.0001985661428529863, "loss": 0.9791, "step": 112},
    {"epoch": 0.24, "grad_norm": 0.4882315993309021, "learning_rate": 0.0001985379592335325, "loss": 0.7889, "step": 113},
    {"epoch": 0.24, "grad_norm": 0.42783451080322266, "learning_rate": 0.00019850950335673643, "loss": 1.1608, "step": 114},
    {"epoch": 0.24, "grad_norm": 1.0337790250778198, "learning_rate": 0.00019848077530122083, "loss": 1.045, "step": 115},
    {"epoch": 0.24, "grad_norm": 0.3064819872379303, "learning_rate": 0.00019845177514636042, "loss": 0.6474, "step": 116},
    {"epoch": 0.25, "grad_norm": 0.29662173986434937, "learning_rate": 0.00019842250297228176, "loss": 0.9493, "step": 117},
    {"epoch": 0.25, "grad_norm": 0.515562891960144, "learning_rate": 0.00019839295885986296, "loss": 1.0605, "step": 118},
    {"epoch": 0.25, "grad_norm": 1.4514832496643066, "learning_rate": 0.0001983631428907335, "loss": 0.6917, "step": 119},
    {"epoch": 0.25, "eval_loss": 0.8804921507835388, "eval_runtime": 61.5233, "eval_samples_per_second": 1.625, "eval_steps_per_second": 1.625, "step": 119},
    {"epoch": 0.25, "grad_norm": 0.30757004022598267, "learning_rate": 0.00019833305514727395, "loss": 0.9722, "step": 120},
    {"epoch": 0.25, "grad_norm": 0.5162855386734009, "learning_rate": 0.00019830269571261583, "loss": 1.2197, "step": 121},
    {"epoch": 0.26, "grad_norm": 0.5095639824867249, "learning_rate": 0.00019827206467064133, "loss": 0.8676, "step": 122},
    {"epoch": 0.26, "grad_norm": 0.4804045557975769, "learning_rate": 0.00019824116210598306, "loss": 0.8565, "step": 123},
    {"epoch": 0.26, "grad_norm": 0.28008362650871277, "learning_rate": 0.0001982099881040239, "loss": 0.9001, "step": 124},
    {"epoch": 0.26, "grad_norm": 0.6209085583686829, "learning_rate": 0.0001981785427508966, "loss": 0.7188, "step": 125},
    {"epoch": 0.27, "grad_norm": 0.32877278327941895, "learning_rate": 0.0001981468261334837, "loss": 0.6749, "step": 126},
    {"epoch": 0.27, "grad_norm": 0.4256601631641388, "learning_rate": 0.00019811483833941728, "loss": 0.8086, "step": 127},
    {"epoch": 0.27, "grad_norm": 1.1572288274765015, "learning_rate": 0.0001980825794570786, "loss": 0.8554, "step": 128},
    {"epoch": 0.27, "grad_norm": 0.4987819194793701, "learning_rate": 0.00019805004957559793, "loss": 0.6999, "step": 129},
    {"epoch": 0.27, "grad_norm": 0.6852537393569946, "learning_rate": 0.00019801724878485438, "loss": 0.8759, "step": 130},
    {"epoch": 0.28, "grad_norm": 0.7970736622810364, "learning_rate": 0.00019798417717547552, "loss": 0.7471, "step": 131},
    {"epoch": 0.28, "grad_norm": 0.5638220310211182, "learning_rate": 0.00019795083483883715, "loss": 1.0391, "step": 132},
    {"epoch": 0.28, "grad_norm": 0.5482009649276733, "learning_rate": 0.00019791722186706317, "loss": 0.8363, "step": 133},
    {"epoch": 0.28, "grad_norm": 0.23791633546352386, "learning_rate": 0.0001978833383530251, "loss": 0.725, "step": 134},
    {"epoch": 0.28, "grad_norm": 0.5339345335960388, "learning_rate": 0.00019784918439034216, "loss": 0.9828, "step": 135},
    {"epoch": 0.29, "grad_norm": 0.24769064784049988, "learning_rate": 0.00019781476007338058, "loss": 0.9496, "step": 136},
    {"epoch": 0.29, "grad_norm": 0.46634215116500854, "learning_rate": 0.00019778006549725375, "loss": 1.0973, "step": 137},
    {"epoch": 0.29, "grad_norm": 0.8007522821426392, "learning_rate": 0.00019774510075782172, "loss": 0.6847, "step": 138},
    {"epoch": 0.29, "grad_norm": 0.5393804907798767, "learning_rate": 0.00019770986595169096, "loss": 0.6461, "step": 139},
    {"epoch": 0.29, "grad_norm": 0.2891620695590973, "learning_rate": 0.00019767436117621413, "loss": 0.2937, "step": 140},
    {"epoch": 0.3, "grad_norm": 0.9463545680046082, "learning_rate": 0.0001976385865294899, "loss": 0.4934, "step": 141},
    {"epoch": 0.3, "grad_norm": 0.25647807121276855, "learning_rate": 0.00019760254211036244, "loss": 0.7446, "step": 142},
    {"epoch": 0.3, "grad_norm": 0.49435535073280334, "learning_rate": 0.00019756622801842143, "loss": 0.3544, "step": 143},
    {"epoch": 0.3, "grad_norm": 0.7826042175292969, "learning_rate": 0.00019752964435400155, "loss": 0.6972, "step": 144},
    {"epoch": 0.31, "grad_norm": 0.7160178422927856, "learning_rate": 0.00019749279121818235, "loss": 0.9655, "step": 145},
    {"epoch": 0.31, "grad_norm": 0.3925221264362335, "learning_rate": 0.00019745566871278794, "loss": 0.9041, "step": 146},
    {"epoch": 0.31, "grad_norm": 0.5669321417808533, "learning_rate": 0.0001974182769403866, "loss": 0.9093, "step": 147},
    {"epoch": 0.31, "grad_norm": 0.5025343298912048, "learning_rate": 0.00019738061600429064, "loss": 0.6226, "step": 148},
    {"epoch": 0.31, "grad_norm": 1.1127972602844238, "learning_rate": 0.0001973426860085561, "loss": 0.7431, "step": 149},
    {"epoch": 0.32, "grad_norm": 0.4064362049102783, "learning_rate": 0.00019730448705798239, "loss": 0.8444, "step": 150},
    {"epoch": 0.32, "grad_norm": 0.9272475242614746, "learning_rate": 0.00019726601925811204, "loss": 0.836, "step": 151},
    {"epoch": 0.32, "grad_norm": 0.6594715118408203, "learning_rate": 0.00019722728271523034, "loss": 0.9031, "step": 152},
    {"epoch": 0.32, "grad_norm": 0.9399688839912415, "learning_rate": 0.00019718827753636522, "loss": 0.7959, "step": 153},
    {"epoch": 0.32, "grad_norm": 0.4452652633190155, "learning_rate": 0.00019714900382928675, "loss": 0.5638, "step": 154},
    {"epoch": 0.33, "grad_norm": 0.47481146454811096, "learning_rate": 0.000197109461702507, "loss": 0.8291, "step": 155},
    {"epoch": 0.33, "grad_norm": 0.1962246149778366, "learning_rate": 0.00019706965126527963, "loss": 0.7894, "step": 156},
    {"epoch": 0.33, "grad_norm": 1.366571307182312, "learning_rate": 0.00019702957262759965, "loss": 1.1808, "step": 157},
    {"epoch": 0.33, "grad_norm": 1.3261445760726929, "learning_rate": 0.00019698922590020312, "loss": 0.8769, "step": 158},
    {"epoch": 0.33, "grad_norm": 0.5637160539627075, "learning_rate": 0.00019694861119456679, "loss": 0.882, "step": 159},
    {"epoch": 0.34, "grad_norm": 0.4508800208568573, "learning_rate": 0.0001969077286229078, "loss": 1.2723, "step": 160},
    {"epoch": 0.34, "grad_norm": 0.41292956471443176, "learning_rate": 0.0001968665782981835, "loss": 0.7919, "step": 161},
    {"epoch": 0.34, "grad_norm": 0.6105634570121765, "learning_rate": 0.00019682516033409092, "loss": 1.0901, "step": 162},
    {"epoch": 0.34, "grad_norm": 0.6460319757461548, "learning_rate": 0.00019678347484506669, "loss": 1.0425, "step": 163},
    {"epoch": 0.35, "grad_norm": 0.8627430200576782, "learning_rate": 0.00019674152194628638, "loss": 0.8019, "step": 164},
    {"epoch": 0.35, "grad_norm": 0.3218872547149658, "learning_rate": 0.00019669930175366472, "loss": 0.8345, "step": 165},
    {"epoch": 0.35, "grad_norm": 0.6773053407669067, "learning_rate": 0.00019665681438385473, "loss": 1.3567, "step": 166},
    {"epoch": 0.35, "grad_norm": 0.3802971839904785, "learning_rate": 0.0001966140599542477, "loss": 0.7315, "step": 167},
    {"epoch": 0.35, "grad_norm": 0.9038891196250916, "learning_rate": 0.0001965710385829728, "loss": 0.6807, "step": 168},
    {"epoch": 0.36, "grad_norm": 0.7831525802612305, "learning_rate": 0.00019652775038889674, "loss": 1.2796, "step": 169},
    {"epoch": 0.36, "grad_norm": 0.3705346882343292, "learning_rate": 0.00019648419549162348, "loss": 0.8275, "step": 170},
    {"epoch": 0.36, "grad_norm": 0.7794845104217529, "learning_rate": 0.0001964403740114939, "loss": 0.7539, "step": 171},
    {"epoch": 0.36, "grad_norm": 0.2621815800666809, "learning_rate": 0.00019639628606958533, "loss": 0.976, "step": 172},
    {"epoch": 0.36, "grad_norm": 0.6929745674133301, "learning_rate": 0.00019635193178771143, "loss": 0.6198, "step": 173},
    {"epoch": 0.37, "grad_norm": 0.543230414390564, "learning_rate": 0.0001963073112884217, "loss": 0.9319, "step": 174},
    {"epoch": 0.37, "grad_norm": 0.6732174158096313, "learning_rate": 0.0001962624246950012, "loss": 0.804, "step": 175},
    {"epoch": 0.37, "grad_norm": 0.25452062487602234, "learning_rate": 0.00019621727213147027, "loss": 0.7632, "step": 176},
    {"epoch": 0.37, "grad_norm": 0.6591973304748535, "learning_rate": 0.00019617185372258392, "loss": 0.9745, "step": 177},
    {"epoch": 0.37, "grad_norm": 0.6275454163551331, "learning_rate": 0.0001961261695938319, "loss": 0.3411, "step": 178},
    {"epoch": 0.38, "grad_norm": 0.6691128611564636, "learning_rate": 0.00019608021987143804, "loss": 0.9564, "step": 179},
    {"epoch": 0.38, "grad_norm": 0.3190310299396515, "learning_rate": 0.00019603400468235998, "loss": 1.3002, "step": 180},
    {"epoch": 0.38, "grad_norm": 0.4648153781890869, "learning_rate": 0.0001959875241542889, "loss": 0.9507, "step": 181},
    {"epoch": 0.38, "grad_norm": 0.5921639800071716, "learning_rate": 0.00019594077841564907, "loss": 0.9397, "step": 182},
    {"epoch": 0.39, "grad_norm": 0.5769446492195129, "learning_rate": 0.00019589376759559745, "loss": 0.9958, "step": 183},
    {"epoch": 0.39, "grad_norm": 0.8454503417015076, "learning_rate": 0.00019584649182402357, "loss": 1.189, "step": 184},
    {"epoch": 0.39, "grad_norm": 0.2865101099014282, "learning_rate": 0.0001957989512315489, "loss": 0.6747, "step": 185},
    {"epoch": 0.39, "grad_norm": 0.3642055094242096, "learning_rate": 0.0001957511459495266, "loss": 0.5196, "step": 186},
    {"epoch": 0.39, "grad_norm": 0.4965610206127167, "learning_rate": 0.00019570307611004124, "loss": 0.9448, "step": 187},
    {"epoch": 0.4, "grad_norm": 0.5694214105606079, "learning_rate": 0.00019565474184590826, "loss": 0.868, "step": 188},
    {"epoch": 0.4, "grad_norm": 0.6402484774589539, "learning_rate": 0.00019560614329067378, "loss": 0.8872, "step": 189},
    {"epoch": 0.4, "grad_norm": 0.37722048163414, "learning_rate": 0.0001955572805786141, "loss": 0.9253, "step": 190},
    {"epoch": 0.4, "grad_norm": 1.9157966375350952, "learning_rate": 0.00019550815384473534, "loss": 1.6508, "step": 191},
    {"epoch": 0.4, "grad_norm": 0.33376675844192505, "learning_rate": 0.0001954587632247732, "loss": 0.9109, "step": 192},
    {"epoch": 0.41, "grad_norm": 0.2680880129337311, "learning_rate": 0.00019540910885519242, "loss": 1.0693, "step": 193},
    {"epoch": 0.41, "grad_norm": 0.7726811766624451, "learning_rate": 0.00019535919087318652, "loss": 0.9574, "step": 194},
    {"epoch": 0.41, "grad_norm": 0.8604207634925842, "learning_rate": 0.0001953090094166773, "loss": 0.9475, "step": 195},
    {"epoch": 0.41, "grad_norm": 1.3954675197601318, "learning_rate": 0.0001952585646243146, "loss": 1.5094, "step": 196},
    {"epoch": 0.41, "grad_norm": 0.39931145310401917, "learning_rate": 0.00019520785663547586, "loss": 0.9915, "step": 197},
    {"epoch": 0.42, "grad_norm": 0.772156298160553, "learning_rate": 0.00019515688559026563, "loss": 1.4155, "step": 198},
    {"epoch": 0.42, "grad_norm": 0.48633861541748047, "learning_rate": 0.00019510565162951537, "loss": 0.9607, "step": 199},
    {"epoch": 0.42, "grad_norm": 0.4661516845226288, "learning_rate": 0.0001950541548947829, "loss": 1.0283, "step": 200},
    {"epoch": 0.42, "grad_norm": 0.8846752047538757, "learning_rate": 0.00019500239552835215, "loss": 0.756, "step": 201},
    {"epoch": 0.43, "grad_norm": 0.9870714545249939, "learning_rate": 0.00019495037367323262, "loss": 0.7688, "step": 202},
    {"epoch": 0.43, "grad_norm": 0.7435501217842102, "learning_rate": 0.00019489808947315915, "loss": 0.4752, "step": 203},
    {"epoch": 0.43, "grad_norm": 0.6509325504302979, "learning_rate": 0.0001948455430725913, "loss": 0.9053, "step": 204},
    {"epoch": 0.43, "grad_norm": 0.30190637707710266, "learning_rate": 0.0001947927346167132, "loss": 0.9323, "step": 205},
    {"epoch": 0.43, "grad_norm": 0.420055627822876, "learning_rate": 0.00019473966425143292, "loss": 0.6446, "step": 206},
    {"epoch": 0.44, "grad_norm": 0.49513018131256104, "learning_rate": 0.00019468633212338233, "loss": 0.9022, "step": 207},
    {"epoch": 0.44, "grad_norm": 0.4812709391117096, "learning_rate": 0.00019463273837991643, "loss": 0.6835, "step": 208},
    {"epoch": 0.44, "grad_norm": 0.2101246416568756, "learning_rate": 0.00019457888316911306, "loss": 0.5991, "step": 209},
    {"epoch": 0.44, "grad_norm": 0.3539298176765442, "learning_rate": 0.00019452476663977248, "loss": 0.7323, "step": 210},
    {"epoch": 0.44, "grad_norm": 0.29954612255096436, "learning_rate": 0.00019447038894141705, "loss": 0.6868, "step": 211},
    {"epoch": 0.45, "grad_norm": 0.4053567349910736, "learning_rate": 0.00019441575022429065, "loss": 1.0805, "step": 212},
    {"epoch": 0.45, "grad_norm": 0.7733739614486694, "learning_rate": 0.00019436085063935835, "loss": 1.3524, "step": 213},
    {"epoch": 0.45, "grad_norm": 0.6096423864364624, "learning_rate": 0.00019430569033830605, "loss": 1.0183, "step": 214},
    {"epoch": 0.45, "grad_norm": 1.1940584182739258, "learning_rate": 0.00019425026947353992, "loss": 1.0919, "step": 215},
    {"epoch": 0.45, "grad_norm": 0.4030895233154297, "learning_rate": 0.00019419458819818614, "loss": 0.7642, "step": 216},
    {"epoch": 0.46, "grad_norm": 1.4116997718811035, "learning_rate": 0.00019413864666609034, "loss": 0.6112, "step": 217},
    {"epoch": 0.46, "grad_norm": 0.4545953869819641, "learning_rate": 0.00019408244503181724, "loss": 0.7328, "step": 218},
    {"epoch": 0.46, "grad_norm": 0.9334838390350342, "learning_rate": 0.0001940259834506502, "loss": 1.0518, "step": 219},
    {"epoch": 0.46, "grad_norm": 0.2695348858833313, "learning_rate": 0.00019396926207859084, "loss": 0.987, "step": 220},
    {"epoch": 0.47, "grad_norm": 1.3967281579971313, "learning_rate": 0.00019391228107235858, "loss": 1.0819, "step": 221},
    {"epoch": 0.47, "grad_norm": 1.0220236778259277, "learning_rate": 0.00019385504058939024, "loss": 0.9621, "step": 222},
    {"epoch": 0.47, "grad_norm": 2.5694682598114014, "learning_rate": 0.00019379754078783937, "loss": 1.0647, "step": 223},
    {"epoch": 0.47, "grad_norm": 0.6181725263595581, "learning_rate": 0.00019373978182657625, "loss": 1.0991, "step": 224},
    {"epoch": 0.47, "grad_norm": 0.508532702922821, "learning_rate": 0.0001936817638651871, "loss": 1.0276, "step": 225},
    {"epoch": 0.48, "grad_norm": 0.3763074278831482, "learning_rate": 0.00019362348706397373, "loss": 0.7447, "step": 226},
    {"epoch": 0.48, "grad_norm": 0.9533042311668396, "learning_rate": 0.00019356495158395315, "loss": 1.1979, "step": 227},
    {"epoch": 0.48, "grad_norm": 0.43593689799308777, "learning_rate": 0.00019350615758685708, "loss": 1.0028, "step": 228},
    {"epoch": 0.48, "grad_norm": 0.7646205425262451, "learning_rate": 0.00019344710523513156, "loss": 1.463, "step": 229},
    {"epoch": 0.48, "grad_norm": 0.29402196407318115, "learning_rate": 0.00019338779469193639, "loss": 1.2726, "step": 230},
    {"epoch": 0.49, "grad_norm": 0.5773300528526306, "learning_rate": 0.00019332822612114475, "loss": 0.4847, "step": 231},
    {"epoch": 0.49, "grad_norm": 1.0580178499221802, "learning_rate": 0.00019326839968734279, "loss": 1.0639, "step": 232},
    {"epoch": 0.49, "grad_norm": 0.6212771534919739, "learning_rate": 0.00019320831555582908, "loss": 0.7302, "step": 233},
    {"epoch": 0.49, "grad_norm": 1.1953450441360474, "learning_rate": 0.00019314797389261424, "loss": 0.9873, "step": 234},
    {"epoch": 0.49, "grad_norm": 1.856995940208435, "learning_rate": 0.00019308737486442045, "loss": 0.9573, "step": 235},
    {"epoch": 0.5, "grad_norm": 0.36539939045906067, "learning_rate": 0.00019302651863868092, "loss": 0.6884, "step": 236},
    {"epoch": 0.5, "grad_norm": 0.3269266188144684, "learning_rate": 0.0001929654053835395, "loss": 0.9445, "step": 237},
    {"epoch": 0.5, "grad_norm": 0.46403074264526367, "learning_rate": 0.00019290403526785025, "loss": 0.9783, "step": 238},
    {"epoch": 0.5, "eval_loss": 0.8782849311828613, "eval_runtime": 61.3598, "eval_samples_per_second": 1.63, "eval_steps_per_second": 1.63, "step": 238},
    {"epoch": 0.5, "grad_norm": 0.6627680659294128, "learning_rate": 0.00019284240846117697, "loss": 0.9527, "step": 239},
    {"epoch": 0.51, "grad_norm": 0.866802990436554, "learning_rate": 0.00019278052513379255, "loss": 0.6096, "step": 240},
    {"epoch": 0.51, "grad_norm": 0.5304962396621704, "learning_rate": 0.00019271838545667876, "loss": 0.8335, "step": 241},
    {"epoch": 0.51, "grad_norm": 1.076063632965088, "learning_rate": 0.00019265598960152555, "loss": 1.3308, "step": 242},
    {"epoch": 0.51, "grad_norm": 2.491516351699829, "learning_rate": 0.00019259333774073083, "loss": 1.4458, "step": 243},
    {"epoch": 0.51, "grad_norm": 1.3771064281463623, "learning_rate": 0.00019253043004739968, "loss": 1.4581, "step": 244},
    {"epoch": 0.52, "grad_norm": 0.24413131177425385, "learning_rate": 0.00019246726669534415, "loss": 0.7537, "step": 245},
    {"epoch": 0.52, "grad_norm": 1.02517831325531, "learning_rate": 0.00019240384785908265, "loss": 1.0646, "step": 246},
    {"epoch": 0.52, "grad_norm": 0.4848421514034271, "learning_rate": 0.00019234017371383945, "loss": 0.6972, "step": 247},
    {"epoch": 0.52, "grad_norm": 0.8870792388916016, "learning_rate": 0.00019227624443554425, "loss": 1.2114, "step": 248},
    {"epoch": 0.52, "grad_norm": 0.5171313285827637, "learning_rate": 0.00019221206020083166, "loss": 0.7243, "step": 249},
    {"epoch": 0.53, "grad_norm": 0.5975112915039062, "learning_rate": 0.00019214762118704076, "loss": 0.964, "step": 250},
    {"epoch": 0.53, "grad_norm": 1.0921701192855835, "learning_rate": 0.0001920829275722146, "loss": 1.1413, "step": 251},
    {"epoch": 0.53, "grad_norm": 0.6540035009384155, "learning_rate": 0.00019201797953509955, "loss": 0.9732, "step": 252},
    {"epoch": 0.53, "grad_norm": 1.137863278388977, "learning_rate": 0.0001919527772551451, "loss": 1.3374, "step": 253},
    {"epoch": 0.53, "grad_norm": 1.4139158725738525, "learning_rate": 0.00019188732091250307, "loss": 1.1147, "step": 254},
    {"epoch": 0.54, "grad_norm": 0.5039550065994263, "learning_rate": 0.00019182161068802741, "loss": 0.7832, "step": 255},
    {"epoch": 0.54, "grad_norm": 1.567670464515686, "learning_rate": 0.00019175564676327339, "loss": 0.6684, "step": 256},
    {"epoch": 0.54, "grad_norm": 0.4372114837169647, "learning_rate": 0.0001916894293204973, "loss": 0.7285, "step": 257},
    {"epoch": 0.54, "grad_norm": 0.4466225206851959, "learning_rate": 0.00019162295854265594, "loss": 0.5705, "step": 258},
    {"epoch": 0.55, "grad_norm": 1.7975250482559204, "learning_rate": 0.00019155623461340594, "loss": 1.4155, "step": 259},
    {"epoch": 0.55, "grad_norm": 0.6310514211654663, "learning_rate": 0.00019148925771710347, "loss": 0.7388, "step": 260},
    {"epoch": 0.55, "grad_norm": 0.5273220539093018, "learning_rate": 0.0001914220280388037, "loss": 0.9241, "step": 261},
    {"epoch": 0.55, "grad_norm": 1.8354101181030273, "learning_rate": 0.0001913545457642601, "loss": 0.8085, "step": 262},
    {"epoch": 0.55, "grad_norm": 0.7362698316574097, "learning_rate": 0.00019128681107992415, "loss": 0.953, "step": 263},
    {"epoch": 0.56, "grad_norm": 0.5334580540657043, "learning_rate": 0.00019121882417294462, "loss": 0.4416, "step": 264},
    {"epoch": 0.56, "grad_norm": 0.6351854205131531, "learning_rate": 0.00019115058523116733, "loss": 0.6414, "step": 265},
    {"epoch": 0.56, "grad_norm": 0.28386977314949036, "learning_rate": 0.00019108209444313433, "loss": 1.0273, "step": 266},
    {"epoch": 0.56, "grad_norm": 0.5504246354103088, "learning_rate": 0.00019101335199808354, "loss": 1.1191, "step": 267},
    {"epoch": 0.56, "grad_norm": 0.7449864149093628, "learning_rate": 0.00019094435808594823, "loss": 1.1073, "step": 268},
    {"epoch": 0.57, "grad_norm": 0.6302490830421448, "learning_rate": 0.00019087511289735644, "loss": 1.2092, "step": 269},
    {"epoch": 0.57, "grad_norm": 0.5618910789489746, "learning_rate": 0.0001908056166236305, "loss": 1.1966, "step": 270},
    {"epoch": 0.57, "grad_norm": 0.46393775939941406, "learning_rate": 0.0001907358694567865, "loss": 0.7148, "step": 271},
    {"epoch": 0.57, "grad_norm": 0.34640607237815857, "learning_rate": 0.00019066587158953366, "loss": 1.1297, "step": 272},
    {"epoch": 0.57, "grad_norm": 1.3277580738067627, "learning_rate": 0.00019059562321527396, "loss": 1.0978, "step": 273},
    {"epoch": 0.58, "grad_norm": 0.8730579018592834, "learning_rate": 0.0001905251245281015, "loss": 0.9732, "step": 274},
    {"epoch": 0.58, "grad_norm": 0.32950034737586975, "learning_rate": 0.00019045437572280194, "loss": 1.0795, "step": 275},
    {"epoch": 0.58, "grad_norm": 0.48170116543769836, "learning_rate": 0.00019038337699485208, "loss": 0.8124, "step": 276},
    {"epoch": 0.58, "grad_norm": 0.858323335647583, "learning_rate": 0.00019031212854041918, "loss": 0.813, "step": 277},
    {"epoch": 0.59, "grad_norm": 0.9366027116775513, "learning_rate": 0.00019024063055636057, "loss": 1.5074, "step": 278},
    {"epoch": 0.59, "grad_norm": 0.4378308653831482, "learning_rate": 0.00019016888324022296, "loss": 0.8387, "step": 279},
    {"epoch": 0.59, "grad_norm": 0.5781106948852539, "learning_rate": 0.0001900968867902419, "loss": 1.0496, "step": 280},
    {"epoch": 0.59, "grad_norm": 0.834186851978302, "learning_rate": 0.00019002464140534147, "loss": 1.2684, "step": 281},
    {"epoch": 0.59, "grad_norm": 0.752008855342865, "learning_rate": 0.00018995214728513343, "loss": 1.069, "step": 282},
    {"epoch": 0.6, "grad_norm": 0.3941871225833893, "learning_rate": 0.0001898794046299167, "loss": 0.942, "step": 283},
    {"epoch": 0.6, "grad_norm": 0.4069131314754486, "learning_rate": 0.0001898064136406771, "loss": 0.7116, "step": 284},
    {"epoch": 0.6, "grad_norm": 0.6478765606880188, "learning_rate": 0.00018973317451908642, "loss": 0.9494, "step": 285},
    {"epoch": 0.6, "grad_norm": 1.8658535480499268, "learning_rate": 0.0001896596874675021, "loss": 0.7592, "step": 286},
    {"epoch": 0.6, "grad_norm": 0.8622011542320251, "learning_rate": 0.0001895859526889666, "loss": 0.9392, "step": 287},
    {"epoch": 0.61, "grad_norm": 0.8127020001411438, "learning_rate": 0.00018951197038720688, "loss": 1.3309, "step": 288},
    {"epoch": 0.61, "grad_norm": 0.5042945146560669, "learning_rate": 0.0001894377407666337, "loss": 0.7607, "step": 289},
    {"epoch": 0.61, "grad_norm": 0.7252426743507385, "learning_rate": 0.00018936326403234125, "loss": 1.1221, "step": 290},
    {"epoch": 0.61, "grad_norm": 0.7334456443786621, "learning_rate": 0.0001892885403901064, "loss": 0.4738, "step": 291},
    {"epoch": 0.61, "grad_norm": 2.6204662322998047, "learning_rate": 0.00018921357004638835, "loss": 1.2511, "step": 292},
    {"epoch": 0.62, "grad_norm": 0.5708286762237549, "learning_rate": 0.00018913835320832778, "loss": 1.0887, "step": 293},
    {"epoch": 0.62, "grad_norm": 1.0324314832687378, "learning_rate": 0.00018906289008374655, "loss": 1.1019, "step": 294},
    {"epoch": 0.62, "grad_norm": 0.3663407862186432, "learning_rate": 0.0001889871808811469, "loss": 1.0333, "step": 295},
    {"epoch": 0.62, "grad_norm": 0.7219849824905396, "learning_rate": 0.00018891122580971098, "loss": 0.858, "step": 296},
    {"epoch": 0.63, "grad_norm": 0.7850363850593567, "learning_rate": 0.00018883502507930042, "loss": 0.9503, "step": 297},
    {"epoch": 0.63, "grad_norm": 0.28012195229530334, "learning_rate": 0.00018875857890045543, "loss": 0.8068, "step": 298},
    {"epoch": 0.63, "grad_norm": 0.7574068307876587, "learning_rate": 0.00018868188748439444, "loss": 0.7557, "step": 299},
    {"epoch": 0.63, "grad_norm": 0.9131019711494446, "learning_rate": 0.00018860495104301345, "loss": 1.1462, "step": 300},
    {"epoch": 0.63, "grad_norm": 0.24085545539855957, "learning_rate": 0.00018852776978888551, "loss": 0.8286, "step": 301},
    {"epoch": 0.64, "grad_norm": 0.4502617418766022, "learning_rate": 0.00018845034393526005, "loss": 1.0052, "step": 302},
    {"epoch": 0.64, "grad_norm": 0.7258254289627075, "learning_rate": 0.00018837267369606228, "loss": 0.9703, "step": 303},
    {"epoch": 0.64, "grad_norm": 0.6078888773918152, "learning_rate": 0.00018829475928589271, "loss": 0.8479, "step": 304},
    {"epoch": 0.64, "grad_norm": 0.5912296772003174, "learning_rate": 0.00018821660092002641, "loss": 1.0336, "step": 305},
    {"epoch": 0.64, "grad_norm": 0.3440995216369629, "learning_rate": 0.0001881381988144126, "loss": 0.7629, "step": 306},
    {"epoch": 0.65, "grad_norm": 0.5613306164741516, "learning_rate": 0.0001880595531856738, "loss": 1.0355, "step": 307},
    {"epoch": 0.65, "grad_norm": 0.5265874862670898, "learning_rate": 0.0001879806642511055, "loss": 0.9046, "step": 308},
    {"epoch": 0.65, "grad_norm": 0.37300053238868713, "learning_rate": 0.0001879015322286754, "loss": 0.578, "step": 309},
    {"epoch": 0.65, "grad_norm": 0.7948945164680481, "learning_rate": 0.00018782215733702286, "loss": 0.5693, "step": 310},
    {"epoch": 0.65, "grad_norm": 0.5222792625427246, "learning_rate": 0.0001877425397954582, "loss": 0.812, "step": 311},
    {"epoch": 0.66, "grad_norm": 0.6407319903373718, "learning_rate": 0.00018766267982396224, "loss": 0.7317, "step": 312},
    {"epoch": 0.66, "grad_norm": 0.36041396856307983, "learning_rate": 0.00018758257764318567, "loss": 0.3617, "step": 313},
    {"epoch": 0.66, "grad_norm": 0.6465966105461121, "learning_rate": 0.00018750223347444828, "loss": 0.6037, "step": 314},
    {"epoch": 0.66, "grad_norm": 0.4281207025051117, "learning_rate": 0.00018742164753973855, "loss": 0.5269, "step": 315},
    {"epoch": 0.67, "grad_norm": 0.3671799898147583, "learning_rate": 0.00018734082006171299, "loss": 0.66, "step": 316},
    {"epoch": 0.67, "grad_norm": 0.4369129240512848, "learning_rate": 0.00018725975126369535, "loss": 1.1395, "step": 317},
    {"epoch": 0.67, "grad_norm": 0.4631548523902893, "learning_rate": 0.00018717844136967624, "loss": 0.7871, "step": 318},
    {"epoch": 0.67, "grad_norm": 0.4736942946910858, "learning_rate": 0.00018709689060431242, "loss": 1.2983, "step": 319},
    {"epoch": 0.67, "grad_norm": 0.7346480488777161, "learning_rate": 0.00018701509919292613, "loss": 0.9507, "step": 320},
    {"epoch": 0.68, "grad_norm": 0.5298660397529602, "learning_rate": 0.00018693306736150444, "loss": 0.6621, "step": 321},
    {"epoch": 0.68, "grad_norm": 0.5501769781112671, "learning_rate": 0.0001868507953366989, "loss": 0.6954, "step": 322},
    {"epoch": 0.68, "grad_norm": 1.565510630607605, "learning_rate": 0.0001867682833458245, "loss": 1.2279, "step": 323},
    {"epoch": 0.68, "grad_norm": 0.2679019570350647, "learning_rate": 0.00018668553161685933, "loss": 0.6207, "step": 324},
    {"epoch": 0.68, "grad_norm": 1.0185893774032593, "learning_rate": 0.00018660254037844388, "loss": 1.1179, "step": 325},
    {"epoch": 0.69, "grad_norm": 0.400493323802948, "learning_rate": 0.00018651930985988036, "loss": 0.5947, "step": 326},
    {"epoch": 0.69, "grad_norm": 0.7746186256408691, "learning_rate": 0.00018643584029113215, "loss": 1.0365, "step": 327},
    {"epoch": 0.69, "grad_norm": 0.5792235136032104, "learning_rate": 0.0001863521319028231, "loss": 0.7102, "step": 328},
    {"epoch": 0.69, "grad_norm": 0.35895833373069763, "learning_rate": 0.00018626818492623688, "loss": 0.5571, "step": 329},
    {"epoch": 0.69, "grad_norm": 0.41158926486968994, "learning_rate": 0.0001861839995933164, "loss": 0.9009, "step": 330},
    {"epoch": 0.7, "grad_norm": 0.5845640301704407, "learning_rate": 0.00018609957613666315, "loss": 0.3317, "step": 331},
    {"epoch": 0.7, "grad_norm": 0.4458400309085846, "learning_rate": 0.00018601491478953657, "loss": 1.0094, "step": 332},
    {"epoch": 0.7, "grad_norm": 0.6415822505950928, "learning_rate": 0.00018593001578585326, "loss": 0.9772, "step": 333},
    {"epoch": 0.7, "grad_norm": 1.616220474243164, "learning_rate": 0.00018584487936018661, "loss": 0.6879, "step": 334},
    {"epoch": 0.71, "grad_norm": 1.4885902404785156, "learning_rate": 0.00018575950574776595, "loss": 0.9627, "step": 335},
    {"epoch": 0.71, "grad_norm": 0.2818461060523987, "learning_rate": 0.0001856738951844759, "loss": 0.9156, "step": 336},
    {"epoch": 0.71, "grad_norm": 1.2286068201065063, "learning_rate": 0.00018558804790685588, "loss": 2.6577, "step": 337},
    {"epoch": 0.71, "grad_norm": 0.7086435556411743, "learning_rate": 0.00018550196415209914, "loss": 0.8172, "step": 338},
    {"epoch": 0.71, "grad_norm": 1.0317937135696411, "learning_rate": 0.00018541564415805258, "loss": 1.3381, "step": 339},
    {"epoch": 0.72, "grad_norm": 0.693418562412262, "learning_rate": 0.00018532908816321558, "loss": 1.1259, "step": 340},
    {"epoch": 0.72, "grad_norm": 1.25714910030365, "learning_rate": 0.00018524229640673974, "loss": 0.7892, "step": 341},
    {"epoch": 0.72, "grad_norm": 0.6042699813842773, "learning_rate": 0.00018515526912842796, "loss": 0.8982, "step": 342},
    {"epoch": 0.72, "grad_norm": 0.3453720211982727, "learning_rate": 0.00018506800656873398, "loss": 0.9424, "step": 343},
    {"epoch": 0.72, "grad_norm": 0.7436335682868958, "learning_rate": 0.0001849805089687615, "loss": 0.7121, "step": 344},
    {"epoch": 0.73, "grad_norm": 0.8308970928192139, "learning_rate": 0.00018489277657026375, "loss": 1.1099, "step": 345},
    {"epoch": 0.73, "grad_norm": 0.6892271637916565, "learning_rate": 0.0001848048096156426, "loss": 0.6814, "step": 346},
    {"epoch": 0.73, "grad_norm": 0.30851200222969055, "learning_rate": 0.00018471660834794805, "loss": 0.283, "step": 347},
    {"epoch": 0.73, "grad_norm": 0.2706887722015381, "learning_rate": 0.00018462817301087748, "loss": 0.6258, "step": 348},
    {"epoch": 0.73, "grad_norm": 0.9876924157142639, "learning_rate": 0.00018453950384877504, "loss": 0.6983, "step": 349},
    {"epoch": 0.74, "grad_norm": 0.3037252128124237, "learning_rate": 0.0001844506011066308, "loss": 0.9854, "step": 350},
    {"epoch": 0.74, "grad_norm": 1.0091379880905151, "learning_rate": 0.00018436146503008035, "loss": 0.9871, "step": 351},
    {"epoch": 0.74, "grad_norm": 0.5219744443893433, "learning_rate": 0.0001842720958654039, "loss": 0.3771, "step": 352},
    {"epoch": 0.74, "grad_norm": 0.49409008026123047, "learning_rate": 0.00018418249385952575, "loss": 1.0838, "step": 353},
    {"epoch": 0.75, "grad_norm": 0.29014095664024353, "learning_rate": 0.00018409265926001343, "loss": 0.9922, "step": 354},
    {"epoch": 0.75, "grad_norm": 0.3307441771030426, "learning_rate": 0.00018400259231507717, "loss": 1.0458, "step": 355},
    {"epoch": 0.75, "grad_norm": 0.3356322646141052, "learning_rate": 0.00018391229327356916, "loss": 0.9891, "step": 356},
    {"epoch": 0.75, "grad_norm": 0.3707556426525116, "learning_rate": 0.00018382176238498286, "loss": 0.9578, "step": 357},
    {"epoch": 0.75, "eval_loss": 0.8826732635498047, "eval_runtime": 61.8974, "eval_samples_per_second": 1.616, "eval_steps_per_second": 1.616, "step": 357},
    {"epoch": 0.75, "grad_norm": 0.7507327198982239, "learning_rate": 0.00018373099989945236, "loss": 0.8916, "step": 358},
    {"epoch": 0.76, "grad_norm": 0.3686985373497009, "learning_rate": 0.00018364000606775155, "loss": 0.9855, "step": 359},
    {"epoch": 0.76, "grad_norm": 0.34240958094596863, "learning_rate": 0.00018354878114129367, "loss": 1.0874, "step": 360},
    {"epoch": 0.76, "grad_norm": 0.2911188304424286, "learning_rate": 0.00018345732537213027, "loss": 1.2217, "step": 361},
    {"epoch": 0.76, "grad_norm": 0.5415646433830261, "learning_rate": 0.0001833656390129509, "loss": 0.6675, "step": 362},
    {"epoch": 0.76, "grad_norm": 0.36682239174842834, "learning_rate": 0.00018327372231708212, "loss": 0.8702, "step": 363},
    {"epoch": 0.77, "grad_norm": 0.5462591648101807, "learning_rate": 0.0001831815755384869, "loss": 0.9005, "step": 364},
    {"epoch": 0.77, "grad_norm": 0.5059930682182312, "learning_rate": 0.00018308919893176396, "loss": 0.8994, "step": 365},
    {"epoch": 0.77, "grad_norm": 0.6344266533851624, "learning_rate": 0.00018299659275214706, "loss": 1.1571, "step": 366},
    {"epoch": 0.77, "grad_norm": 0.2552272081375122, "learning_rate": 0.00018290375725550417, "loss": 1.2492, "step": 367},
    {"epoch": 0.77, "grad_norm": 0.5543289184570312, "learning_rate": 0.00018281069269833692, "loss": 1.0141, "step": 368},
    {"epoch": 0.78, "grad_norm": 1.3686586618423462, "learning_rate": 0.0001827173993377798, "loss": 0.8264, "step": 369},
    {"epoch": 0.78, "grad_norm": 0.5549390912055969, "learning_rate": 0.0001826238774315995, "loss": 1.0753, "step": 370},
    {"epoch": 0.78, "grad_norm": 0.8563418388366699, "learning_rate": 0.00018253012723819416, "loss": 0.4458, "step": 371},
    {"epoch": 0.78, "grad_norm": 0.4292491376399994, "learning_rate": 0.00018243614901659264, "loss": 1.1994, "step": 372},
    {"epoch": 0.79, "grad_norm": 0.37186571955680847, "learning_rate": 0.00018234194302645394, "loss": 0.9811, "step": 373},
    {"epoch": 0.79, "grad_norm": 0.6655788421630859, "learning_rate": 0.00018224750952806624, "loss": 0.5048, "step": 374},
    {"epoch": 0.79, "grad_norm": 0.7731723785400391, "learning_rate": 0.00018215284878234642, "loss": 0.9481, "step": 375},
    {"epoch": 0.79, "grad_norm": 0.36243554949760437, "learning_rate": 0.00018205796105083915, "loss": 1.0048, "step": 376},
    {"epoch": 0.79, "grad_norm": 1.08484947681427, "learning_rate": 0.00018196284659571639, "loss": 1.0245, "step": 377},
    {"epoch": 0.8, "grad_norm": 1.128653883934021, "learning_rate": 0.00018186750567977637, "loss": 0.9997, "step": 378},
    {"epoch": 0.8, "grad_norm": 0.6685619950294495, "learning_rate": 0.00018177193856644316, "loss": 1.3555, "step": 379},
    {"epoch": 0.8, "grad_norm": 0.30426543951034546, "learning_rate": 0.00018167614551976567, "loss": 1.1209, "step": 380},
    {"epoch": 0.8, "grad_norm": 0.6189528107643127, "learning_rate": 0.00018158012680441723, "loss": 1.0321, "step": 381},
    {"epoch": 0.8, "grad_norm": 0.6775807738304138, "learning_rate": 0.00018148388268569453, "loss": 0.7826, "step": 382},
    {"epoch": 0.81, "grad_norm": 0.4594517946243286, "learning_rate": 0.00018138741342951705, "loss": 0.6422, "step": 383},
    {"epoch": 0.81, "grad_norm": 0.537011444568634, "learning_rate": 0.00018129071930242648, "loss": 0.9219, "step": 384},
    {"epoch": 0.81, "grad_norm": 0.43772855401039124, "learning_rate": 0.00018119380057158568, "loss": 1.1737, "step": 385},
    {"epoch": 0.81, "grad_norm": 0.7221130132675171, "learning_rate": 0.00018109665750477806, "loss": 0.8636, "step": 386},
    {"epoch": 0.81, "grad_norm": 0.3437989354133606, "learning_rate": 0.00018099929037040694, "loss": 0.9238, "step": 387},
    {"epoch": 0.82, "grad_norm": 0.47244492173194885, "learning_rate": 0.00018090169943749476, "loss": 0.7715, "step": 388},
    {"epoch": 0.82, "grad_norm": 0.7109631299972534, "learning_rate": 0.0001808038849756822, "loss": 0.4109, "step": 389},
    {"epoch": 0.82, "grad_norm": 0.27005669474601746, "learning_rate": 0.00018070584725522762, "loss": 0.7158, "step": 390},
    {"epoch": 0.82, "grad_norm": 0.4006590247154236, "learning_rate": 0.00018060758654700622, "loss": 1.0167, "step": 391},
    {"epoch": 0.83, "grad_norm": 0.5627204179763794, "learning_rate": 0.00018050910312250931, "loss": 0.8679, "step": 392},
    {"epoch": 0.83, "grad_norm": 0.5019241571426392, "learning_rate": 0.00018041039725384352, "loss": 0.9163, "step": 393},
    {"epoch": 0.83, "grad_norm": 1.00431227684021, "learning_rate": 0.00018031146921373018, "loss": 0.676, "step": 394},
    {"epoch": 0.83, "grad_norm": 0.7062071561813354, "learning_rate": 0.0001802123192755044, "loss": 1.2407, "step": 395},
    {"epoch": 0.83, "grad_norm": 1.6554285287857056, "learning_rate": 0.00018011294771311435, "loss": 1.1187, "step": 396},
    {"epoch": 0.84, "grad_norm": 1.08072829246521, "learning_rate": 0.00018001335480112064, "loss": 0.4878, "step": 397},
    {"epoch": 0.84, "grad_norm": 0.3923906981945038, "learning_rate": 0.00017991354081469538, "loss": 0.7836, "step": 398},
    {"epoch": 0.84, "grad_norm": 0.20446747541427612, "learning_rate": 0.0001798135060296216, "loss": 0.4597, "step": 399},
    {"epoch": 0.84, "grad_norm": 0.5178759098052979,
|
"learning_rate": 0.00017971325072229226, |
|
"loss": 1.7021, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5159180164337158, |
|
"learning_rate": 0.0001796127751697097, |
|
"loss": 0.6037, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.9448319673538208, |
|
"learning_rate": 0.0001795120796494848, |
|
"loss": 0.8965, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 1.0035223960876465, |
|
"learning_rate": 0.00017941116443983613, |
|
"loss": 0.9786, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.26040011644363403, |
|
"learning_rate": 0.00017931002981958933, |
|
"loss": 0.8624, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.518144965171814, |
|
"learning_rate": 0.00017920867606817625, |
|
"loss": 1.0095, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5256940722465515, |
|
"learning_rate": 0.00017910710346563416, |
|
"loss": 0.7392, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.8347258567810059, |
|
"learning_rate": 0.000179005312292605, |
|
"loss": 0.8081, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.8221095204353333, |
|
"learning_rate": 0.00017890330283033468, |
|
"loss": 1.1406, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.8048923015594482, |
|
"learning_rate": 0.00017880107536067218, |
|
"loss": 1.4362, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 1.9037342071533203, |
|
"learning_rate": 0.0001786986301660689, |
|
"loss": 1.2935, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5521582961082458, |
|
"learning_rate": 0.00017859596752957768, |
|
"loss": 1.0742, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 1.052284598350525, |
|
"learning_rate": 0.00017849308773485226, |
|
"loss": 0.7661, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.43000859022140503, |
|
"learning_rate": 0.00017838999106614632, |
|
"loss": 0.812, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.7804751396179199, |
|
"learning_rate": 0.00017828667780831278, |
|
"loss": 0.7995, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5827552080154419, |
|
"learning_rate": 0.000178183148246803, |
|
"loss": 0.6489, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.3453142642974854, |
|
"learning_rate": 0.00017807940266766593, |
|
"loss": 0.7154, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.24924832582473755, |
|
"learning_rate": 0.00017797544135754744, |
|
"loss": 0.8061, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.4459979236125946, |
|
"learning_rate": 0.0001778712646036894, |
|
"loss": 1.1167, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.6095878481864929, |
|
"learning_rate": 0.000177766872693929, |
|
"loss": 0.8344, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.43662723898887634, |
|
"learning_rate": 0.00017766226591669785, |
|
"loss": 1.0373, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.8759774565696716, |
|
"learning_rate": 0.00017755744456102122, |
|
"loss": 1.0988, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 1.1800742149353027, |
|
"learning_rate": 0.00017745240891651735, |
|
"loss": 0.7385, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5820197463035583, |
|
"learning_rate": 0.0001773471592733964, |
|
"loss": 0.9363, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.6128491759300232, |
|
"learning_rate": 0.00017724169592245995, |
|
"loss": 0.7762, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5693449378013611, |
|
"learning_rate": 0.0001771360191551, |
|
"loss": 0.7526, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.7725418210029602, |
|
"learning_rate": 0.00017703012926329815, |
|
"loss": 0.7019, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5068923234939575, |
|
"learning_rate": 0.0001769240265396249, |
|
"loss": 0.8308, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.34859699010849, |
|
"learning_rate": 0.0001768177112772388, |
|
"loss": 0.9593, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.34673023223876953, |
|
"learning_rate": 0.00017671118376988573, |
|
"loss": 1.0334, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5354735851287842, |
|
"learning_rate": 0.0001766044443118978, |
|
"loss": 1.2355, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.2567592859268188, |
|
"learning_rate": 0.0001764974931981929, |
|
"loss": 0.8935, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.4151657521724701, |
|
"learning_rate": 0.00017639033072427366, |
|
"loss": 1.1042, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.4307219386100769, |
|
"learning_rate": 0.00017628295718622665, |
|
"loss": 1.2273, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.6330164074897766, |
|
"learning_rate": 0.0001761753728807217, |
|
"loss": 1.2027, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.47434625029563904, |
|
"learning_rate": 0.00017606757810501088, |
|
"loss": 0.9242, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 1.0463887453079224, |
|
"learning_rate": 0.00017595957315692782, |
|
"loss": 1.151, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.7210713028907776, |
|
"learning_rate": 0.00017585135833488692, |
|
"loss": 0.8223, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5121049284934998, |
|
"learning_rate": 0.00017574293393788235, |
|
"loss": 0.6994, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.8933761119842529, |
|
"learning_rate": 0.00017563430026548734, |
|
"loss": 0.846, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5270050764083862, |
|
"learning_rate": 0.0001755254576178535, |
|
"loss": 0.7119, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.37028369307518005, |
|
"learning_rate": 0.0001754164062957096, |
|
"loss": 0.8623, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.6245588660240173, |
|
"learning_rate": 0.00017530714660036112, |
|
"loss": 1.0409, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.878105640411377, |
|
"learning_rate": 0.0001751976788336892, |
|
"loss": 0.6867, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.3765283226966858, |
|
"learning_rate": 0.00017508800329814995, |
|
"loss": 1.2251, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.4110933840274811, |
|
"learning_rate": 0.00017497812029677344, |
|
"loss": 0.8676, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.3986817598342896, |
|
"learning_rate": 0.000174868030133163, |
|
"loss": 1.6298, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.4310443103313446, |
|
"learning_rate": 0.0001747577331114945, |
|
"loss": 0.7328, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 1.5922423601150513, |
|
"learning_rate": 0.00017464722953651504, |
|
"loss": 0.6629, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 2.27004075050354, |
|
"learning_rate": 0.00017453651971354264, |
|
"loss": 1.4748, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.31181880831718445, |
|
"learning_rate": 0.00017442560394846516, |
|
"loss": 1.0477, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.1180263757705688, |
|
"learning_rate": 0.00017431448254773944, |
|
"loss": 0.9225, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.7490403056144714, |
|
"learning_rate": 0.00017420315581839044, |
|
"loss": 0.7847, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.138551115989685, |
|
"learning_rate": 0.0001740916240680105, |
|
"loss": 1.2782, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.9375423789024353, |
|
"learning_rate": 0.0001739798876047584, |
|
"loss": 0.8316, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.0941681861877441, |
|
"learning_rate": 0.0001738679467373586, |
|
"loss": 0.9702, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.2845444083213806, |
|
"learning_rate": 0.00017375580177510016, |
|
"loss": 0.8563, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.7341310381889343, |
|
"learning_rate": 0.0001736434530278362, |
|
"loss": 0.7102, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.886854350566864, |
|
"learning_rate": 0.0001735309008059829, |
|
"loss": 1.4872, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.44623202085494995, |
|
"learning_rate": 0.00017341814542051845, |
|
"loss": 0.8142, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.8813387155532837, |
|
"learning_rate": 0.00017330518718298264, |
|
"loss": 1.0869, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.8468006253242493, |
|
"learning_rate": 0.0001731920264054755, |
|
"loss": 1.2859, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.6797575354576111, |
|
"learning_rate": 0.00017307866340065685, |
|
"loss": 1.0288, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.25991517305374146, |
|
"learning_rate": 0.00017296509848174508, |
|
"loss": 0.7996, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.557244598865509, |
|
"learning_rate": 0.00017285133196251663, |
|
"loss": 0.6877, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.49986258149147034, |
|
"learning_rate": 0.00017273736415730488, |
|
"loss": 0.8285, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5839115977287292, |
|
"learning_rate": 0.0001726231953809993, |
|
"loss": 0.8617, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 1.1967806816101074, |
|
"learning_rate": 0.0001725088259490448, |
|
"loss": 0.719, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.7830839157104492, |
|
"learning_rate": 0.00017239425617744048, |
|
"loss": 0.623, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 2.0930089950561523, |
|
"learning_rate": 0.00017227948638273916, |
|
"loss": 1.1768, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.562641441822052, |
|
"learning_rate": 0.0001721645168820462, |
|
"loss": 0.5235, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5656068325042725, |
|
"learning_rate": 0.00017204934799301883, |
|
"loss": 0.9211, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.934866726398468, |
|
"learning_rate": 0.0001719339800338651, |
|
"loss": 0.7897, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.30100950598716736, |
|
"learning_rate": 0.00017181841332334318, |
|
"loss": 1.0436, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5494408011436462, |
|
"learning_rate": 0.00017170264818076026, |
|
"loss": 0.6412, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.6607212424278259, |
|
"learning_rate": 0.00017158668492597186, |
|
"loss": 1.3501, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5112847089767456, |
|
"learning_rate": 0.0001714705238793809, |
|
"loss": 0.4772, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.8900191783905029, |
|
"eval_runtime": 62.0282, |
|
"eval_samples_per_second": 1.612, |
|
"eval_steps_per_second": 1.612, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2824682593345642, |
|
"learning_rate": 0.0001713541653619368, |
|
"loss": 0.5561, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.28952574729919434, |
|
"learning_rate": 0.0001712376096951345, |
|
"loss": 1.1009, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.2117149382829666, |
|
"learning_rate": 0.00017112085720101373, |
|
"loss": 0.8326, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4022321403026581, |
|
"learning_rate": 0.00017100390820215804, |
|
"loss": 0.5327, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.26270753145217896, |
|
"learning_rate": 0.00017088676302169393, |
|
"loss": 0.2126, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.44842302799224854, |
|
"learning_rate": 0.00017076942198328987, |
|
"loss": 0.5412, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.6974923610687256, |
|
"learning_rate": 0.00017065188541115554, |
|
"loss": 0.3271, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.5276718735694885, |
|
"learning_rate": 0.0001705341536300409, |
|
"loss": 0.4566, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.30386191606521606, |
|
"learning_rate": 0.00017041622696523518, |
|
"loss": 0.8557, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.2913454473018646, |
|
"learning_rate": 0.0001702981057425662, |
|
"loss": 0.6007, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5108489394187927, |
|
"learning_rate": 0.00017017979028839916, |
|
"loss": 0.8481, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.6002893447875977, |
|
"learning_rate": 0.00017006128092963605, |
|
"loss": 0.3967, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.34100326895713806, |
|
"learning_rate": 0.00016994257799371457, |
|
"loss": 0.2971, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.34491753578186035, |
|
"learning_rate": 0.00016982368180860728, |
|
"loss": 0.594, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.3472824692726135, |
|
"learning_rate": 0.00016970459270282067, |
|
"loss": 0.426, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.2740953266620636, |
|
"learning_rate": 0.00016958531100539427, |
|
"loss": 0.3144, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.26527807116508484, |
|
"learning_rate": 0.00016946583704589973, |
|
"loss": 0.6867, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.2539099156856537, |
|
"learning_rate": 0.00016934617115443992, |
|
"loss": 0.5074, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.34288522601127625, |
|
"learning_rate": 0.00016922631366164797, |
|
"loss": 0.5723, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4750526249408722, |
|
"learning_rate": 0.00016910626489868649, |
|
"loss": 0.0978, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.3608519732952118, |
|
"learning_rate": 0.00016898602519724645, |
|
"loss": 0.4997, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.47260135412216187, |
|
"learning_rate": 0.00016886559488954648, |
|
"loss": 0.4042, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.7438716292381287, |
|
"learning_rate": 0.00016874497430833182, |
|
"loss": 0.2184, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.4565902650356293, |
|
"learning_rate": 0.0001686241637868734, |
|
"loss": 0.6163, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5079072713851929, |
|
"learning_rate": 0.0001685031636589669, |
|
"loss": 0.3755, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.5474829077720642, |
|
"learning_rate": 0.00016838197425893202, |
|
"loss": 0.5009, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.4594095051288605, |
|
"learning_rate": 0.00016826059592161134, |
|
"loss": 0.7541, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.40495601296424866, |
|
"learning_rate": 0.00016813902898236939, |
|
"loss": 0.7832, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.5781679153442383, |
|
"learning_rate": 0.00016801727377709194, |
|
"loss": 0.6996, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.2371175289154053, |
|
"learning_rate": 0.00016789533064218485, |
|
"loss": 0.7179, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.6955786347389221, |
|
"learning_rate": 0.00016777319991457325, |
|
"loss": 0.478, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.1571210622787476, |
|
"learning_rate": 0.00016765088193170053, |
|
"loss": 0.5954, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.7339815497398376, |
|
"learning_rate": 0.00016752837703152754, |
|
"loss": 0.5888, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.29333943128585815, |
|
"learning_rate": 0.00016740568555253155, |
|
"loss": 0.4534, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4680086374282837, |
|
"learning_rate": 0.0001672828078337053, |
|
"loss": 0.4307, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.6249940395355225, |
|
"learning_rate": 0.00016715974421455617, |
|
"loss": 0.2269, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.774949312210083, |
|
"learning_rate": 0.00016703649503510513, |
|
"loss": 0.6022, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.3996463418006897, |
|
"learning_rate": 0.00016691306063588583, |
|
"loss": 0.9525, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4572999179363251, |
|
"learning_rate": 0.00016678944135794374, |
|
"loss": 0.5463, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.6360291838645935, |
|
"learning_rate": 0.00016666563754283515, |
|
"loss": 0.4418, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.5520346164703369, |
|
"learning_rate": 0.00016654164953262612, |
|
"loss": 0.6817, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 2.0099127292633057, |
|
"learning_rate": 0.0001664174776698917, |
|
"loss": 0.6017, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.4746718406677246, |
|
"learning_rate": 0.00016629312229771495, |
|
"loss": 1.0119, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.32624900341033936, |
|
"learning_rate": 0.00016616858375968595, |
|
"loss": 0.6034, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.459741473197937, |
|
"learning_rate": 0.00016604386239990078, |
|
"loss": 0.8181, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.8022719621658325, |
|
"learning_rate": 0.00016591895856296073, |
|
"loss": 0.4458, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.3159444034099579, |
|
"learning_rate": 0.00016579387259397127, |
|
"loss": 0.7185, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.4759514629840851, |
|
"learning_rate": 0.00016566860483854104, |
|
"loss": 0.5851, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.26993754506111145, |
|
"learning_rate": 0.000165543155642781, |
|
"loss": 0.4677, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.5374035239219666, |
|
"learning_rate": 0.00016541752535330345, |
|
"loss": 0.4579, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.6800963282585144, |
|
"learning_rate": 0.00016529171431722096, |
|
"loss": 0.7814, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 1.4050697088241577, |
|
"learning_rate": 0.00016516572288214552, |
|
"loss": 0.3604, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.4229757785797119, |
|
"learning_rate": 0.00016503955139618762, |
|
"loss": 0.4654, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5959732532501221, |
|
"learning_rate": 0.0001649132002079552, |
|
"loss": 0.8165, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.5716747045516968, |
|
"learning_rate": 0.00016478666966655264, |
|
"loss": 0.5398, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 1.381700873374939, |
|
"learning_rate": 0.00016465996012157995, |
|
"loss": 0.5922, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.7402687668800354, |
|
"learning_rate": 0.00016453307192313175, |
|
"loss": 0.5325, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.6408881545066833, |
|
"learning_rate": 0.00016440600542179615, |
|
"loss": 0.6203, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.7938200235366821, |
|
"learning_rate": 0.00016427876096865394, |
|
"loss": 0.4506, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.5587771534919739, |
|
"learning_rate": 0.0001641513389152777, |
|
"loss": 0.4175, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.35594528913497925, |
|
"learning_rate": 0.0001640237396137306, |
|
"loss": 0.9313, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 1.8496744632720947, |
|
"learning_rate": 0.0001638959634165656, |
|
"loss": 0.816, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"grad_norm": 0.8227376341819763, |
|
"learning_rate": 0.00016376801067682434, |
|
"loss": 0.1371, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.9681535363197327, |
|
"learning_rate": 0.00016363988174803638, |
|
"loss": 0.6228, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.5398049354553223, |
|
"learning_rate": 0.0001635115769842179, |
|
"loss": 0.4775, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.34808897972106934, |
|
"learning_rate": 0.00016338309673987101, |
|
"loss": 0.8051, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 0.7682779431343079, |
|
"learning_rate": 0.0001632544413699828, |
|
"loss": 0.1041, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.4708777964115143, |
|
"learning_rate": 0.0001631256112300239, |
|
"loss": 0.5147, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.3061200976371765, |
|
"learning_rate": 0.00016299660667594814, |
|
"loss": 0.3265, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.540558934211731, |
|
"learning_rate": 0.00016286742806419108, |
|
"loss": 0.605, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.25295355916023254, |
|
"learning_rate": 0.00016273807575166926, |
|
"loss": 0.394, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.2657540440559387, |
|
"learning_rate": 0.0001626085500957791, |
|
"loss": 0.5722, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.551193118095398, |
|
"learning_rate": 0.000162478851454396, |
|
"loss": 0.2836, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.4108597934246063, |
|
"learning_rate": 0.00016234898018587337, |
|
"loss": 0.4717, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.4006686806678772, |
|
"learning_rate": 0.00016221893664904142, |
|
"loss": 0.5644, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.2879432439804077, |
|
"learning_rate": 0.0001620887212032065, |
|
"loss": 0.8188, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 0.45707154273986816, |
|
"learning_rate": 0.00016195833420814984, |
|
"loss": 0.8171, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.9172948002815247, |
|
"learning_rate": 0.00016182777602412665, |
|
"loss": 0.4868, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.1610100269317627, |
|
"learning_rate": 0.00016169704701186527, |
|
"loss": 0.8614, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5628842115402222, |
|
"learning_rate": 0.0001615661475325658, |
|
"loss": 0.3256, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 1.1540424823760986, |
|
"learning_rate": 0.0001614350779478996, |
|
"loss": 0.4672, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.3111453354358673, |
|
"learning_rate": 0.0001613038386200078, |
|
"loss": 0.3977, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.48439329862594604, |
|
"learning_rate": 0.00016117242991150064, |
|
"loss": 0.6522, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.49099549651145935, |
|
"learning_rate": 0.00016104085218545633, |
|
"loss": 0.6117, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.4174061119556427, |
|
"learning_rate": 0.00016090910580542005, |
|
"loss": 0.4991, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.7796496152877808, |
|
"learning_rate": 0.00016077719113540302, |
|
"loss": 0.613, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.414725124835968, |
|
"learning_rate": 0.00016064510853988138, |
|
"loss": 0.4263, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.30187514424324036, |
|
"learning_rate": 0.00016051285838379525, |
|
"loss": 0.3254, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.4453214406967163, |
|
"learning_rate": 0.00016038044103254775, |
|
"loss": 0.2087, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.46796607971191406, |
|
"learning_rate": 0.00016024785685200395, |
|
"loss": 0.5363, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 1.049502968788147, |
|
"learning_rate": 0.00016011510620848987, |
|
"loss": 0.3565, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.5326039791107178, |
|
"learning_rate": 0.00015998218946879138, |
|
"loss": 0.7829, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.260036826133728, |
|
"learning_rate": 0.00015984910700015336, |
|
"loss": 0.5265, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.54875648021698, |
|
"learning_rate": 0.00015971585917027862, |
|
"loss": 0.2853, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.24017104506492615, |
|
"learning_rate": 0.00015958244634732674, |
|
"loss": 0.5476, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.0179369449615479, |
|
"learning_rate": 0.00015944886889991325, |
|
"loss": 0.4053, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.7541561722755432, |
|
"learning_rate": 0.00015931512719710855, |
|
"loss": 0.4962, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.7088395357131958, |
|
"learning_rate": 0.00015918122160843678, |
|
"loss": 1.24, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.2890975773334503, |
|
"learning_rate": 0.00015904715250387498, |
|
"loss": 0.8903, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.3250918388366699, |
|
"learning_rate": 0.000158912920253852, |
|
"loss": 0.5375, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 0.7649716734886169, |
|
"learning_rate": 0.00015877852522924732, |
|
"loss": 0.4943, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 1.180847406387329, |
|
"learning_rate": 0.0001586439678013903, |
|
"loss": 0.5133, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.22564201056957245, |
|
"learning_rate": 0.00015850924834205895, |
|
"loss": 0.4563, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.396543025970459, |
|
"learning_rate": 0.000158374367223479, |
|
"loss": 0.3559, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.421466588973999, |
|
"learning_rate": 0.0001582393248183228, |
|
"loss": 0.4872, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.6064138412475586, |
|
"learning_rate": 0.00015810412149970833, |
|
"loss": 0.4341, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.4124835431575775, |
|
"learning_rate": 0.00015796875764119826, |
|
"loss": 0.415, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.7403770089149475, |
|
"learning_rate": 0.00015783323361679864, |
|
"loss": 0.391, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 1.0920146703720093, |
|
"learning_rate": 0.0001576975498009583, |
|
"loss": 0.6798, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 0.5007575154304504, |
|
"learning_rate": 0.00015756170656856737, |
|
"loss": 0.6142, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.3402646780014038, |
|
"learning_rate": 0.00015742570429495652, |
|
"loss": 0.7871, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.49383026361465454, |
|
"learning_rate": 0.0001572895433558958, |
|
"loss": 0.3604, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.6136035919189453, |
|
"learning_rate": 0.00015715322412759375, |
|
"loss": 0.854, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.41479477286338806, |
|
"learning_rate": 0.0001570167469866962, |
|
"loss": 0.1273, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.4173536002635956, |
|
"learning_rate": 0.00015688011231028518, |
|
"loss": 0.601, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.4010690748691559, |
|
"learning_rate": 0.0001567433204758782, |
|
"loss": 0.5361, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.45052483677864075, |
|
"learning_rate": 0.00015660637186142682, |
|
"loss": 0.4356, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.6652331352233887, |
|
"learning_rate": 0.00015646926684531585, |
|
"loss": 0.2981, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.568655788898468, |
|
"learning_rate": 0.0001563320058063622, |
|
"loss": 0.4653, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"eval_loss": 0.9619532227516174, |
|
"eval_runtime": 61.5436, |
|
"eval_samples_per_second": 1.625, |
|
"eval_steps_per_second": 1.625, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"grad_norm": 0.2903865575790405, |
|
"learning_rate": 0.00015619458912381396, |
|
"loss": 0.1973, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.7614594101905823, |
|
"learning_rate": 0.0001560570171773491, |
|
"loss": 0.6037, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.3540831208229065, |
|
"learning_rate": 0.0001559192903470747, |
|
"loss": 0.5743, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.4056189954280853, |
|
"learning_rate": 0.00015578140901352573, |
|
"loss": 0.6213, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.5303568840026855, |
|
"learning_rate": 0.00015564337355766412, |
|
"loss": 0.7036, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.5078765153884888, |
|
"learning_rate": 0.0001555051843608775, |
|
"loss": 0.411, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.9911600351333618, |
|
"learning_rate": 0.0001553668418049784, |
|
"loss": 0.1678, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.2692767083644867, |
|
"learning_rate": 0.000155228346272203, |
|
"loss": 0.4074, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.386247843503952, |
|
"learning_rate": 0.00015508969814521025, |
|
"loss": 0.5516, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 0.39059126377105713, |
|
"learning_rate": 0.0001549508978070806, |
|
"loss": 0.355, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.6897940039634705, |
|
"learning_rate": 0.00015481194564131512, |
|
"loss": 0.5107, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.523536741733551, |
|
"learning_rate": 0.00015467284203183435, |
|
"loss": 0.4266, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.37748581171035767, |
|
"learning_rate": 0.00015453358736297729, |
|
"loss": 0.7265, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.8041501641273499, |
|
"learning_rate": 0.00015439418201950025, |
|
"loss": 0.2922, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 0.36440232396125793, |
|
"learning_rate": 0.00015425462638657595, |
|
"loss": 0.6034, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 1.179821491241455, |
|
"learning_rate": 0.00015411492084979226, |
|
"loss": 0.3024, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.23662069439888, |
|
"learning_rate": 0.0001539750657951513, |
|
"loss": 0.4297, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.3672749102115631, |
|
"learning_rate": 0.00015383506160906825, |
|
"loss": 0.5914, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.42869898676872253, |
|
"learning_rate": 0.00015369490867837035, |
|
"loss": 0.8226, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.34783145785331726, |
|
"learning_rate": 0.00015355460739029586, |
|
"loss": 0.9571, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.5273573994636536, |
|
"learning_rate": 0.00015341415813249288, |
|
"loss": 0.4833, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.7641803026199341, |
|
"learning_rate": 0.0001532735612930184, |
|
"loss": 0.2554, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.423232764005661, |
|
"learning_rate": 0.00015313281726033715, |
|
"loss": 0.6854, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.42158639430999756, |
|
"learning_rate": 0.0001529919264233205, |
|
"loss": 0.6112, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.359598845243454, |
|
"learning_rate": 0.00015285088917124556, |
|
"loss": 0.6985, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.301753431558609, |
|
"learning_rate": 0.0001527097058937939, |
|
"loss": 0.6063, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.4818098545074463, |
|
"learning_rate": 0.00015256837698105047, |
|
"loss": 0.2259, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 1.6764453649520874, |
|
"learning_rate": 0.0001524269028235028, |
|
"loss": 0.8115, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.8210150003433228, |
|
"learning_rate": 0.00015228528381203962, |
|
"loss": 0.3319, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.5808678865432739, |
|
"learning_rate": 0.0001521435203379498, |
|
"loss": 0.3861, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.2755192816257477, |
|
"learning_rate": 0.00015200161279292155, |
|
"loss": 0.2439, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.4608403146266937, |
|
"learning_rate": 0.000151859561569041, |
|
"loss": 0.8368, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 1.6705602407455444, |
|
"learning_rate": 0.00015171736705879126, |
|
"loss": 0.5849, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.45606300234794617, |
|
"learning_rate": 0.00015157502965505143, |
|
"loss": 0.6276, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5950043797492981, |
|
"learning_rate": 0.00015143254975109538, |
|
"loss": 0.6346, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 2.2966413497924805, |
|
"learning_rate": 0.00015128992774059063, |
|
"loss": 0.5741, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 1.1310389041900635, |
|
"learning_rate": 0.0001511471640175974, |
|
"loss": 0.7679, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.5066660046577454, |
|
"learning_rate": 0.00015100425897656753, |
|
"loss": 0.5976, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.534641683101654, |
|
"learning_rate": 0.00015086121301234316, |
|
"loss": 0.642, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.39099201560020447, |
|
"learning_rate": 0.0001507180265201559, |
|
"loss": 0.5081, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5232080221176147, |
|
"learning_rate": 0.00015057469989562567, |
|
"loss": 0.2411, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.5573452115058899, |
|
"learning_rate": 0.00015043123353475943, |
|
"loss": 1.0094, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 2.027163505554199, |
|
"learning_rate": 0.00015028762783395034, |
|
"loss": 0.2975, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.46357014775276184, |
|
"learning_rate": 0.00015014388318997655, |
|
"loss": 0.8153, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.5300567746162415, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 0.7068, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.3386455476284027, |
|
"learning_rate": 0.00014985597866156559, |
|
"loss": 0.9168, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.2023034393787384, |
|
"learning_rate": 0.0001497118195725998, |
|
"loss": 0.382, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.2776263654232025, |
|
"learning_rate": 0.00014956752313140977, |
|
"loss": 0.693, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.6025331020355225, |
|
"learning_rate": 0.0001494230897366821, |
|
"loss": 0.7256, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.4146818518638611, |
|
"learning_rate": 0.00014927851978748178, |
|
"loss": 0.5046, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.3705938756465912, |
|
"learning_rate": 0.00014913381368325115, |
|
"loss": 0.7319, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.2651060223579407, |
|
"learning_rate": 0.0001489889718238087, |
|
"loss": 0.526, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.41096463799476624, |
|
"learning_rate": 0.00014884399460934805, |
|
"loss": 0.3067, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.22056828439235687, |
|
"learning_rate": 0.00014869888244043673, |
|
"loss": 0.5476, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.6910893321037292, |
|
"learning_rate": 0.00014855363571801523, |
|
"loss": 0.5243, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.4330577552318573, |
|
"learning_rate": 0.00014840825484339573, |
|
"loss": 0.4357, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.43148112297058105, |
|
"learning_rate": 0.0001482627402182611, |
|
"loss": 0.3913, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.4191662073135376, |
|
"learning_rate": 0.0001481170922446638, |
|
"loss": 0.2875, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.8446200489997864, |
|
"learning_rate": 0.00014797131132502465, |
|
"loss": 0.6589, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.7334960103034973, |
|
"learning_rate": 0.00014782539786213183, |
|
"loss": 0.6975, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.42938506603240967, |
|
"learning_rate": 0.00014767935225913975, |
|
"loss": 0.3074, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 2.6491518020629883, |
|
"learning_rate": 0.00014753317491956796, |
|
"loss": 0.6397, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.0263487100601196, |
|
"learning_rate": 0.00014738686624729986, |
|
"loss": 0.5503, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.4753721058368683, |
|
"learning_rate": 0.00014724042664658184, |
|
"loss": 1.0253, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.7451716065406799, |
|
"learning_rate": 0.00014709385652202203, |
|
"loss": 0.3982, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 1.6008319854736328, |
|
"learning_rate": 0.00014694715627858908, |
|
"loss": 0.212, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 0.4117997884750366, |
|
"learning_rate": 0.0001468003263216113, |
|
"loss": 0.3975, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.33254194259643555, |
|
"learning_rate": 0.00014665336705677534, |
|
"loss": 0.5263, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.039075255393982, |
|
"learning_rate": 0.00014650627889012507, |
|
"loss": 0.6964, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 7.058536052703857, |
|
"learning_rate": 0.00014635906222806058, |
|
"loss": 0.7029, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 1.0019419193267822, |
|
"learning_rate": 0.00014621171747733697, |
|
"loss": 0.6372, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 0.31377118825912476, |
|
"learning_rate": 0.00014606424504506324, |
|
"loss": 0.6574, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.46048787236213684, |
|
"learning_rate": 0.00014591664533870118, |
|
"loss": 0.3682, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.6753351092338562, |
|
"learning_rate": 0.0001457689187660642, |
|
"loss": 0.4969, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.4977830946445465, |
|
"learning_rate": 0.0001456210657353163, |
|
"loss": 0.4409, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.34079307317733765, |
|
"learning_rate": 0.00014547308665497082, |
|
"loss": 0.3568, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.651228129863739, |
|
"learning_rate": 0.0001453249819338894, |
|
"loss": 0.8023, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.34143513441085815, |
|
"learning_rate": 0.00014517675198128085, |
|
"loss": 0.6065, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.4881991446018219, |
|
"learning_rate": 0.00014502839720669989, |
|
"loss": 0.5526, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.28715357184410095, |
|
"learning_rate": 0.00014487991802004623, |
|
"loss": 0.5798, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.9956842660903931, |
|
"learning_rate": 0.00014473131483156327, |
|
"loss": 0.2817, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.4922630190849304, |
|
"learning_rate": 0.00014458258805183705, |
|
"loss": 0.3929, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.4495314061641693, |
|
"learning_rate": 0.00014443373809179508, |
|
"loss": 0.2987, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.7611126899719238, |
|
"learning_rate": 0.00014428476536270515, |
|
"loss": 0.9684, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.40701159834861755, |
|
"learning_rate": 0.0001441356702761744, |
|
"loss": 0.6025, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.7356356382369995, |
|
"learning_rate": 0.00014398645324414792, |
|
"loss": 0.8664, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.0680326223373413, |
|
"learning_rate": 0.00014383711467890774, |
|
"loss": 0.5663, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.4097828269004822, |
|
"learning_rate": 0.00014368765499307178, |
|
"loss": 0.5657, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.771783709526062, |
|
"learning_rate": 0.00014353807459959242, |
|
"loss": 0.3531, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.7945293188095093, |
|
"learning_rate": 0.00014338837391175582, |
|
"loss": 0.3199, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 1.1944937705993652, |
|
"learning_rate": 0.00014323855334318026, |
|
"loss": 0.495, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.3822149634361267, |
|
"learning_rate": 0.0001430886133078154, |
|
"loss": 0.5544, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.3462957441806793, |
|
"learning_rate": 0.00014293855421994094, |
|
"loss": 0.0456, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 1.3546236753463745, |
|
"learning_rate": 0.00014278837649416544, |
|
"loss": 0.5839, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.6205141544342041, |
|
"learning_rate": 0.0001426380805454254, |
|
"loss": 0.8681, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.4317213296890259, |
|
"learning_rate": 0.00014248766678898387, |
|
"loss": 0.3221, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.31438273191452026, |
|
"learning_rate": 0.00014233713564042937, |
|
"loss": 0.4549, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.6450943946838379, |
|
"learning_rate": 0.00014218648751567492, |
|
"loss": 0.3213, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.5037809610366821, |
|
"learning_rate": 0.00014203572283095657, |
|
"loss": 0.8642, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.26184359192848206, |
|
"learning_rate": 0.0001418848420028325, |
|
"loss": 0.4245, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.6354640126228333, |
|
"learning_rate": 0.0001417338454481818, |
|
"loss": 0.647, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5512405633926392, |
|
"learning_rate": 0.0001415827335842033, |
|
"loss": 0.701, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.30941978096961975, |
|
"learning_rate": 0.00014143150682841438, |
|
"loss": 0.3872, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5071930885314941, |
|
"learning_rate": 0.00014128016559864998, |
|
"loss": 0.3423, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.5384394526481628, |
|
"learning_rate": 0.00014112871031306119, |
|
"loss": 0.8117, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.7472350597381592, |
|
"learning_rate": 0.00014097714139011427, |
|
"loss": 0.2549, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.712765097618103, |
|
"learning_rate": 0.00014082545924858954, |
|
"loss": 0.5212, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.0152575969696045, |
|
"learning_rate": 0.00014067366430758004, |
|
"loss": 0.8595, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.4496733546257019, |
|
"learning_rate": 0.00014052175698649053, |
|
"loss": 0.8319, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 1.291023850440979, |
|
"learning_rate": 0.00014036973770503624, |
|
"loss": 0.7661, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.8034072518348694, |
|
"learning_rate": 0.00014021760688324176, |
|
"loss": 0.4416, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.48388585448265076, |
|
"learning_rate": 0.00014006536494143987, |
|
"loss": 0.6063, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.2967996895313263, |
|
"learning_rate": 0.0001399130123002703, |
|
"loss": 0.8233, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.4136080741882324, |
|
"learning_rate": 0.00013976054938067884, |
|
"loss": 0.2559, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"grad_norm": 0.4075431525707245, |
|
"learning_rate": 0.0001396079766039157, |
|
"loss": 0.2941, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.5675719380378723, |
|
"learning_rate": 0.00013945529439153478, |
|
"loss": 0.5892, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.8851110339164734, |
|
"learning_rate": 0.00013930250316539238, |
|
"loss": 0.3651, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.8696531057357788, |
|
"learning_rate": 0.00013914960334764588, |
|
"loss": 0.2327, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.392892062664032, |
|
"learning_rate": 0.0001389965953607528, |
|
"loss": 0.5907, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"eval_loss": 0.953182578086853, |
|
"eval_runtime": 61.7108, |
|
"eval_samples_per_second": 1.62, |
|
"eval_steps_per_second": 1.62, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.42428654432296753, |
|
"learning_rate": 0.00013884347962746948, |
|
"loss": 0.3576, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 2.1607768535614014, |
|
"learning_rate": 0.00013869025657084995, |
|
"loss": 0.6363, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.4016755521297455, |
|
"learning_rate": 0.00013853692661424484, |
|
"loss": 0.3195, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 1.9975876808166504, |
|
"learning_rate": 0.00013838349018130007, |
|
"loss": 0.5935, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 0.41282904148101807, |
|
"learning_rate": 0.0001382299476959557, |
|
"loss": 0.8013, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.6295652985572815, |
|
"learning_rate": 0.00013807629958244498, |
|
"loss": 0.344, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.4839375615119934, |
|
"learning_rate": 0.00013792254626529286, |
|
"loss": 0.5274, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.37345823645591736, |
|
"learning_rate": 0.00013776868816931502, |
|
"loss": 0.6027, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.7521209716796875, |
|
"learning_rate": 0.00013761472571961663, |
|
"loss": 0.4461, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.3980889320373535, |
|
"learning_rate": 0.00013746065934159123, |
|
"loss": 0.502, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5544388890266418, |
|
"learning_rate": 0.0001373064894609194, |
|
"loss": 0.2908, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.3128870129585266, |
|
"learning_rate": 0.0001371522165035678, |
|
"loss": 0.6016, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.45354199409484863, |
|
"learning_rate": 0.0001369978408957879, |
|
"loss": 0.9534, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.4182490110397339, |
|
"learning_rate": 0.00013684336306411468, |
|
"loss": 0.5976, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 1.1179499626159668, |
|
"learning_rate": 0.00013668878343536562, |
|
"loss": 0.4097, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.30916330218315125, |
|
"learning_rate": 0.00013653410243663952, |
|
"loss": 0.762, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.6563719511032104, |
|
"learning_rate": 0.00013637932049531516, |
|
"loss": 0.5264, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.6355602741241455, |
|
"learning_rate": 0.00013622443803905027, |
|
"loss": 0.4213, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 1.685316562652588, |
|
"learning_rate": 0.0001360694554957804, |
|
"loss": 0.5827, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.5541813373565674, |
|
"learning_rate": 0.00013591437329371736, |
|
"loss": 0.9299, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.40998753905296326, |
|
"learning_rate": 0.0001357591918613486, |
|
"loss": 0.7085, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.43177279829978943, |
|
"learning_rate": 0.00013560391162743569, |
|
"loss": 0.5073, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.5776886940002441, |
|
"learning_rate": 0.00013544853302101302, |
|
"loss": 0.5856, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 0.3610301911830902, |
|
"learning_rate": 0.00013529305647138687, |
|
"loss": 0.6362, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.4736679196357727, |
|
"learning_rate": 0.0001351374824081343, |
|
"loss": 0.4307, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.33841657638549805, |
|
"learning_rate": 0.0001349818112611015, |
|
"loss": 0.5294, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.5574224591255188, |
|
"learning_rate": 0.00013482604346040308, |
|
"loss": 0.5722, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.3317987620830536, |
|
"learning_rate": 0.00013467017943642073, |
|
"loss": 0.7735, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.6277002692222595, |
|
"learning_rate": 0.00013451421961980188, |
|
"loss": 0.5771, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.8945910334587097, |
|
"learning_rate": 0.0001343581644414587, |
|
"loss": 0.6094, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.2320517748594284, |
|
"learning_rate": 0.00013420201433256689, |
|
"loss": 0.502, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.44287192821502686, |
|
"learning_rate": 0.00013404576972456431, |
|
"loss": 0.6849, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 0.7059826850891113, |
|
"learning_rate": 0.00013388943104915003, |
|
"loss": 0.6448, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 1.8904210329055786, |
|
"learning_rate": 0.00013373299873828303, |
|
"loss": 0.58, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.4500790238380432, |
|
"learning_rate": 0.00013357647322418087, |
|
"loss": 0.4979, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.49751782417297363, |
|
"learning_rate": 0.00013341985493931877, |
|
"loss": 0.473, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.39845967292785645, |
|
"learning_rate": 0.00013326314431642822, |
|
"loss": 0.2201, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 0.30875203013420105, |
|
"learning_rate": 0.0001331063417884958, |
|
"loss": 0.2684, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.7159584164619446, |
|
"learning_rate": 0.00013294944778876214, |
|
"loss": 0.2587, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.6593707203865051, |
|
"learning_rate": 0.00013279246275072046, |
|
"loss": 0.5709, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.44912752509117126, |
|
"learning_rate": 0.0001326353871081156, |
|
"loss": 0.5268, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.7455440163612366, |
|
"learning_rate": 0.00013247822129494266, |
|
"loss": 0.6083, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.41440096497535706, |
|
"learning_rate": 0.00013232096574544602, |
|
"loss": 0.658, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.4352113604545593, |
|
"learning_rate": 0.00013216362089411783, |
|
"loss": 0.466, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.46454110741615295, |
|
"learning_rate": 0.00013200618717569714, |
|
"loss": 0.635, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.34865137934684753, |
|
"learning_rate": 0.00013184866502516845, |
|
"loss": 0.757, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.6294916272163391, |
|
"learning_rate": 0.00013169105487776056, |
|
"loss": 0.6611, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.5948600172996521, |
|
"learning_rate": 0.00013153335716894544, |
|
"loss": 0.6016, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.3446013033390045, |
|
"learning_rate": 0.00013137557233443707, |
|
"loss": 0.6529, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.9169672727584839, |
|
"learning_rate": 0.00013121770081018998, |
|
"loss": 0.3821, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.6247507333755493, |
|
"learning_rate": 0.00013105974303239838, |
|
"loss": 0.4863, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 1.3251171112060547, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 0.5048, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"grad_norm": 0.24779891967773438, |
|
"learning_rate": 0.00013074357046214863, |
|
"loss": 0.7598, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.555656373500824, |
|
"learning_rate": 0.00013058535654326554, |
|
"loss": 0.4528, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.6941070556640625, |
|
"learning_rate": 0.00013042705811798565, |
|
"loss": 0.3112, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.29382842779159546, |
|
"learning_rate": 0.0001302686756236826, |
|
"loss": 0.2031, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 0.7080028057098389, |
|
"learning_rate": 0.00013011020949796237, |
|
"loss": 0.4234, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.6657422780990601, |
|
"learning_rate": 0.00012995166017866193, |
|
"loss": 0.4992, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.6290334463119507, |
|
"learning_rate": 0.0001297930281038482, |
|
"loss": 0.5391, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.37018316984176636, |
|
"learning_rate": 0.00012963431371181672, |
|
"loss": 0.7651, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.480070561170578, |
|
"learning_rate": 0.00012947551744109043, |
|
"loss": 0.6715, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.5766746401786804, |
|
"learning_rate": 0.00012931663973041855, |
|
"loss": 0.3435, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.46330827474594116, |
|
"learning_rate": 0.00012915768101877525, |
|
"loss": 0.4134, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.36259955167770386, |
|
"learning_rate": 0.00012899864174535864, |
|
"loss": 0.4665, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.9827147722244263, |
|
"learning_rate": 0.0001288395223495892, |
|
"loss": 0.8229, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.7168895602226257, |
|
"learning_rate": 0.00012868032327110904, |
|
"loss": 0.7883, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 0.503113865852356, |
|
"learning_rate": 0.00012852104494978024, |
|
"loss": 0.8635, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.48859935998916626, |
|
"learning_rate": 0.00012836168782568385, |
|
"loss": 0.4803, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.7904655337333679, |
|
"learning_rate": 0.00012820225233911876, |
|
"loss": 0.6279, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.2756960988044739, |
|
"learning_rate": 0.00012804273893060028, |
|
"loss": 0.2352, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.42820924520492554, |
|
"learning_rate": 0.00012788314804085903, |
|
"loss": 0.4173, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.3982052505016327, |
|
"learning_rate": 0.00012772348011083973, |
|
"loss": 0.4763, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.3944327235221863, |
|
"learning_rate": 0.0001275637355816999, |
|
"loss": 0.6724, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.5017027854919434, |
|
"learning_rate": 0.00012740391489480884, |
|
"loss": 0.9624, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.47813552618026733, |
|
"learning_rate": 0.0001272440184917461, |
|
"loss": 0.5583, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.4271741807460785, |
|
"learning_rate": 0.00012708404681430053, |
|
"loss": 0.6461, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 1.1472680568695068, |
|
"learning_rate": 0.00012692400030446893, |
|
"loss": 0.5068, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.30405759811401367, |
|
"learning_rate": 0.0001267638794044549, |
|
"loss": 0.4573, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.4505164921283722, |
|
"learning_rate": 0.00012660368455666752, |
|
"loss": 0.6588, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.504728376865387, |
|
"learning_rate": 0.00012644341620372023, |
|
"loss": 0.3744, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.8362789154052734, |
|
"learning_rate": 0.00012628307478842953, |
|
"loss": 0.2572, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.6768229007720947, |
|
"learning_rate": 0.00012612266075381386, |
|
"loss": 0.4818, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.1750926375389099, |
|
"learning_rate": 0.00012596217454309216, |
|
"loss": 0.2631, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5636873245239258, |
|
"learning_rate": 0.00012580161659968294, |
|
"loss": 0.414, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5396051406860352, |
|
"learning_rate": 0.00012564098736720283, |
|
"loss": 0.6055, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 0.5941131114959717, |
|
"learning_rate": 0.0001254802872894655, |
|
"loss": 0.4737, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.2654617130756378, |
|
"learning_rate": 0.0001253195168104802, |
|
"loss": 0.256, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.6253994703292847, |
|
"learning_rate": 0.00012515867637445086, |
|
"loss": 0.5111, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.35557523369789124, |
|
"learning_rate": 0.00012499776642577466, |
|
"loss": 0.4252, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.634124219417572, |
|
"learning_rate": 0.00012483678740904082, |
|
"loss": 0.4969, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 0.42550286650657654, |
|
"learning_rate": 0.00012467573976902935, |
|
"loss": 0.2964, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.8458385467529297, |
|
"learning_rate": 0.00012451462395071, |
|
"loss": 0.8494, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.3704116940498352, |
|
"learning_rate": 0.00012435344039924076, |
|
"loss": 0.566, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 2.0013949871063232, |
|
"learning_rate": 0.00012419218955996676, |
|
"loss": 0.8766, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.48777440190315247, |
|
"learning_rate": 0.0001240308718784192, |
|
"loss": 0.6281, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.6042513847351074, |
|
"learning_rate": 0.0001238694878003138, |
|
"loss": 0.6793, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.6612951755523682, |
|
"learning_rate": 0.00012370803777154977, |
|
"loss": 0.7568, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.4469200074672699, |
|
"learning_rate": 0.00012354652223820858, |
|
"loss": 0.6635, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.49686670303344727, |
|
"learning_rate": 0.00012338494164655268, |
|
"loss": 0.4435, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.3647472560405731, |
|
"learning_rate": 0.00012322329644302426, |
|
"loss": 0.3977, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.5049201846122742, |
|
"learning_rate": 0.00012306158707424403, |
|
"loss": 0.3614, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.0189746618270874, |
|
"learning_rate": 0.00012289981398700995, |
|
"loss": 0.2356, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.660258948802948, |
|
"learning_rate": 0.00012273797762829615, |
|
"loss": 0.6134, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 1.4135748147964478, |
|
"learning_rate": 0.00012257607844525146, |
|
"loss": 0.5336, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 0.6823858618736267, |
|
"learning_rate": 0.00012241411688519827, |
|
"loss": 0.5941, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.760971188545227, |
|
"learning_rate": 0.00012225209339563145, |
|
"loss": 0.5189, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.3797072470188141, |
|
"learning_rate": 0.00012209000842421688, |
|
"loss": 0.62, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.39116472005844116, |
|
"learning_rate": 0.00012192786241879033, |
|
"loss": 0.4138, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.4047084152698517, |
|
"learning_rate": 0.00012176565582735625, |
|
"loss": 0.836, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"grad_norm": 0.2494349628686905, |
|
"learning_rate": 0.0001216033890980864, |
|
"loss": 0.5686, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.3844553232192993, |
|
"learning_rate": 0.00012144106267931876, |
|
"loss": 0.9203, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.6064060926437378, |
|
"learning_rate": 0.00012127867701955622, |
|
"loss": 0.4321, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.4317924380302429, |
|
"learning_rate": 0.00012111623256746538, |
|
"loss": 0.5162, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.6777471303939819, |
|
"learning_rate": 0.0001209537297718752, |
|
"loss": 0.3923, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.3700021207332611, |
|
"learning_rate": 0.00012079116908177593, |
|
"loss": 0.6012, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 2.029829502105713, |
|
"learning_rate": 0.00012062855094631778, |
|
"loss": 0.9376, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.513691246509552, |
|
"learning_rate": 0.00012046587581480953, |
|
"loss": 0.6733, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.2661391794681549, |
|
"learning_rate": 0.00012030314413671762, |
|
"loss": 0.6343, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.7096115946769714, |
|
"learning_rate": 0.00012014035636166468, |
|
"loss": 0.7364, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"eval_loss": 0.9359864592552185, |
|
"eval_runtime": 61.6239, |
|
"eval_samples_per_second": 1.623, |
|
"eval_steps_per_second": 1.623, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.976125955581665, |
|
"learning_rate": 0.00011997751293942827, |
|
"loss": 0.8367, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.4215434193611145, |
|
"learning_rate": 0.00011981461431993977, |
|
"loss": 0.749, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.7470360994338989, |
|
"learning_rate": 0.00011965166095328301, |
|
"loss": 0.6391, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.37723225355148315, |
|
"learning_rate": 0.00011948865328969317, |
|
"loss": 0.3735, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.47738218307495117, |
|
"learning_rate": 0.00011932559177955533, |
|
"loss": 0.2499, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 1.9608547687530518, |
|
"learning_rate": 0.00011916247687340347, |
|
"loss": 1.1033, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.8636322617530823, |
|
"learning_rate": 0.00011899930902191902, |
|
"loss": 0.4688, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.8400352001190186, |
|
"learning_rate": 0.0001188360886759297, |
|
"loss": 0.7346, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 2.1385717391967773, |
|
"learning_rate": 0.00011867281628640835, |
|
"loss": 0.6854, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.5982779860496521, |
|
"learning_rate": 0.00011850949230447145, |
|
"loss": 0.203, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.5503920316696167, |
|
"learning_rate": 0.00011834611718137824, |
|
"loss": 0.4089, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.3790716230869293, |
|
"learning_rate": 0.00011818269136852909, |
|
"loss": 0.8513, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.9155630469322205, |
|
"learning_rate": 0.00011801921531746444, |
|
"loss": 0.6007, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.3659191429615021, |
|
"learning_rate": 0.00011785568947986367, |
|
"loss": 0.3286, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5917383432388306, |
|
"learning_rate": 0.00011769211430754357, |
|
"loss": 0.4512, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.5010570287704468, |
|
"learning_rate": 0.00011752849025245727, |
|
"loss": 0.783, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.6034415364265442, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.7131, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.33147016167640686, |
|
"learning_rate": 0.0001172010973024729, |
|
"loss": 0.7657, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 0.781022310256958, |
|
"learning_rate": 0.00011703732931215141, |
|
"loss": 0.3487, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5436139106750488, |
|
"learning_rate": 0.00011687351424821449, |
|
"loss": 0.667, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.3158597946166992, |
|
"learning_rate": 0.00011670965256327818, |
|
"loss": 0.7158, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.3362921178340912, |
|
"learning_rate": 0.00011654574471008713, |
|
"loss": 0.6947, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.4755779802799225, |
|
"learning_rate": 0.00011638179114151377, |
|
"loss": 0.4976, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.33236339688301086, |
|
"learning_rate": 0.00011621779231055676, |
|
"loss": 0.7231, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.30901312828063965, |
|
"learning_rate": 0.00011605374867033977, |
|
"loss": 0.5085, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.426242470741272, |
|
"learning_rate": 0.00011588966067411034, |
|
"loss": 0.5185, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.4367331862449646, |
|
"learning_rate": 0.00011572552877523854, |
|
"loss": 0.7451, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.4386216700077057, |
|
"learning_rate": 0.00011556135342721574, |
|
"loss": 0.6259, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 1.1260708570480347, |
|
"learning_rate": 0.00011539713508365335, |
|
"loss": 0.789, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.4716445803642273, |
|
"learning_rate": 0.00011523287419828163, |
|
"loss": 0.7175, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.2573232054710388, |
|
"learning_rate": 0.00011506857122494831, |
|
"loss": 0.3327, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.4652167558670044, |
|
"learning_rate": 0.00011490422661761744, |
|
"loss": 0.305, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.3845941424369812, |
|
"learning_rate": 0.00011473984083036813, |
|
"loss": 0.4717, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.5258691906929016, |
|
"learning_rate": 0.0001145754143173932, |
|
"loss": 0.2964, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.6041063070297241, |
|
"learning_rate": 0.00011441094753299801, |
|
"loss": 1.1448, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.9182747602462769, |
|
"learning_rate": 0.00011424644093159931, |
|
"loss": 0.7663, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.3786636292934418, |
|
"learning_rate": 0.00011408189496772368, |
|
"loss": 0.6454, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.39566946029663086, |
|
"learning_rate": 0.00011391731009600654, |
|
"loss": 0.8292, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.5287953615188599, |
|
"learning_rate": 0.00011375268677119089, |
|
"loss": 0.4593, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.4483492970466614, |
|
"learning_rate": 0.00011358802544812584, |
|
"loss": 0.1606, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.38028889894485474, |
|
"learning_rate": 0.00011342332658176555, |
|
"loss": 0.3923, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.660707950592041, |
|
"learning_rate": 0.00011325859062716795, |
|
"loss": 0.3406, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 0.6110001802444458, |
|
"learning_rate": 0.00011309381803949333, |
|
"loss": 0.3173, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.7987465858459473, |
|
"learning_rate": 0.00011292900927400333, |
|
"loss": 0.4357, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.6956415176391602, |
|
"learning_rate": 0.00011276416478605949, |
|
"loss": 0.3612, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.6051459908485413, |
|
"learning_rate": 0.00011259928503112198, |
|
"loss": 0.2962, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.41652801632881165, |
|
"learning_rate": 0.00011243437046474853, |
|
"loss": 0.5236, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.8061907887458801, |
|
"learning_rate": 0.000112269421542593, |
|
"loss": 0.8685, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.6048658490180969, |
|
"learning_rate": 0.00011210443872040414, |
|
"loss": 0.4611, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.4245402216911316, |
|
"learning_rate": 0.00011193942245402443, |
|
"loss": 0.903, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.6208779215812683, |
|
"learning_rate": 0.00011177437319938875, |
|
"loss": 0.6526, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 0.8669800162315369, |
|
"learning_rate": 0.00011160929141252303, |
|
"loss": 0.7721, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.3639528751373291, |
|
"learning_rate": 0.0001114441775495432, |
|
"loss": 0.2891, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5209739804267883, |
|
"learning_rate": 0.00011127903206665378, |
|
"loss": 0.8096, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.41638287901878357, |
|
"learning_rate": 0.00011111385542014663, |
|
"loss": 0.2956, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5916814804077148, |
|
"learning_rate": 0.00011094864806639971, |
|
"loss": 0.5097, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.44301638007164, |
|
"learning_rate": 0.00011078341046187589, |
|
"loss": 0.7395, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.3692547380924225, |
|
"learning_rate": 0.00011061814306312152, |
|
"loss": 0.4997, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.0468300580978394, |
|
"learning_rate": 0.00011045284632676536, |
|
"loss": 0.7649, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 1.3064887523651123, |
|
"learning_rate": 0.0001102875207095172, |
|
"loss": 0.5308, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.904100239276886, |
|
"learning_rate": 0.00011012216666816659, |
|
"loss": 0.5012, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.6699868440628052, |
|
"learning_rate": 0.00010995678465958168, |
|
"loss": 0.362, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.24509219825267792, |
|
"learning_rate": 0.00010979137514070782, |
|
"loss": 0.3418, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.4075641632080078, |
|
"learning_rate": 0.00010962593856856649, |
|
"loss": 0.5487, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.5983563661575317, |
|
"learning_rate": 0.00010946047540025372, |
|
"loss": 0.515, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.3288837671279907, |
|
"learning_rate": 0.00010929498609293924, |
|
"loss": 0.6094, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.4095224440097809, |
|
"learning_rate": 0.00010912947110386484, |
|
"loss": 0.4269, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.9093605875968933, |
|
"learning_rate": 0.00010896393089034336, |
|
"loss": 0.4983, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.29987531900405884, |
|
"learning_rate": 0.00010879836590975731, |
|
"loss": 0.363, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.4612436592578888, |
|
"learning_rate": 0.00010863277661955758, |
|
"loss": 0.6466, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.8924248814582825, |
|
"learning_rate": 0.00010846716347726233, |
|
"loss": 0.3571, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.2619093954563141, |
|
"learning_rate": 0.00010830152694045552, |
|
"loss": 0.568, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.9936345815658569, |
|
"learning_rate": 0.00010813586746678583, |
|
"loss": 0.5164, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.6397780776023865, |
|
"learning_rate": 0.00010797018551396527, |
|
"loss": 0.4196, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5577610731124878, |
|
"learning_rate": 0.00010780448153976793, |
|
"loss": 0.7255, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.5776187777519226, |
|
"learning_rate": 0.00010763875600202879, |
|
"loss": 0.371, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.3465639650821686, |
|
"learning_rate": 0.00010747300935864243, |
|
"loss": 0.272, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.4619837999343872, |
|
"learning_rate": 0.00010730724206756168, |
|
"loss": 0.2819, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.34520411491394043, |
|
"learning_rate": 0.00010714145458679649, |
|
"loss": 0.6186, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.6020563840866089, |
|
"learning_rate": 0.00010697564737441252, |
|
"loss": 0.2653, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 0.6317043304443359, |
|
"learning_rate": 0.00010680982088853002, |
|
"loss": 0.3674, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.3305717706680298, |
|
"learning_rate": 0.00010664397558732244, |
|
"loss": 0.6258, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.6932215690612793, |
|
"learning_rate": 0.00010647811192901518, |
|
"loss": 0.2891, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.7648591995239258, |
|
"learning_rate": 0.00010631223037188449, |
|
"loss": 0.4415, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 1.066448450088501, |
|
"learning_rate": 0.00010614633137425598, |
|
"loss": 0.6719, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.4558663070201874, |
|
"learning_rate": 0.00010598041539450343, |
|
"loss": 0.3681, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.6853288412094116, |
|
"learning_rate": 0.00010581448289104758, |
|
"loss": 0.5901, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.6174774169921875, |
|
"learning_rate": 0.00010564853432235486, |
|
"loss": 0.5672, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.3689481019973755, |
|
"learning_rate": 0.00010548257014693601, |
|
"loss": 0.5118, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 0.6491348743438721, |
|
"learning_rate": 0.00010531659082334495, |
|
"loss": 0.756, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.7349205613136292, |
|
"learning_rate": 0.0001051505968101774, |
|
"loss": 0.5861, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.5765767693519592, |
|
"learning_rate": 0.00010498458856606972, |
|
"loss": 0.6011, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.28173473477363586, |
|
"learning_rate": 0.00010481856654969758, |
|
"loss": 1.0505, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.6159635782241821, |
|
"learning_rate": 0.0001046525312197747, |
|
"loss": 0.4108, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.569395899772644, |
|
"learning_rate": 0.00010448648303505151, |
|
"loss": 0.4487, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.7190093994140625, |
|
"learning_rate": 0.00010432042245431406, |
|
"loss": 0.1999, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.5158098340034485, |
|
"learning_rate": 0.00010415434993638269, |
|
"loss": 0.7024, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.4488438665866852, |
|
"learning_rate": 0.0001039882659401105, |
|
"loss": 0.4289, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.4636310935020447, |
|
"learning_rate": 0.00010382217092438255, |
|
"loss": 0.2705, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.72694993019104, |
|
"learning_rate": 0.00010365606534811423, |
|
"loss": 0.2194, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.4917081296443939, |
|
"learning_rate": 0.00010348994967025012, |
|
"loss": 1.0842, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.6921563744544983, |
|
"learning_rate": 0.00010332382434976266, |
|
"loss": 0.5801, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.4937489628791809, |
|
"learning_rate": 0.0001031576898456511, |
|
"loss": 0.7167, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.23887041211128235, |
|
"learning_rate": 0.00010299154661693987, |
|
"loss": 0.6585, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.6684651970863342, |
|
"learning_rate": 0.00010282539512267757, |
|
"loss": 0.6252, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.2771519124507904, |
|
"learning_rate": 0.00010265923582193573, |
|
"loss": 0.6683, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.4243355393409729, |
|
"learning_rate": 0.0001024930691738073, |
|
"loss": 0.1698, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5778141617774963, |
|
"learning_rate": 0.00010232689563740563, |
|
"loss": 0.6546, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 0.5111245512962341, |
|
"learning_rate": 0.00010216071567186312, |
|
"loss": 0.2172, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.43212172389030457, |
|
"learning_rate": 0.00010199452973632981, |
|
"loss": 0.6029, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.5338983535766602, |
|
"learning_rate": 0.00010182833828997238, |
|
"loss": 0.5504, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.9532368779182434, |
|
"learning_rate": 0.00010166214179197264, |
|
"loss": 0.4724, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.3191937804222107, |
|
"learning_rate": 0.00010149594070152638, |
|
"loss": 0.7334, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.7247849702835083, |
|
"learning_rate": 0.0001013297354778421, |
|
"loss": 0.5641, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.3969646692276001, |
|
"learning_rate": 0.00010116352658013973, |
|
"loss": 0.3362, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 1.1442451477050781, |
|
"learning_rate": 0.00010099731446764926, |
|
"loss": 0.4536, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.36780816316604614, |
|
"learning_rate": 0.00010083109959960973, |
|
"loss": 0.3626, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.2609235644340515, |
|
"learning_rate": 0.00010066488243526761, |
|
"loss": 0.6026, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.31235870718955994, |
|
"learning_rate": 0.00010049866343387581, |
|
"loss": 0.2611, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 0.9570428729057312, |
|
"eval_runtime": 61.8021, |
|
"eval_samples_per_second": 1.618, |
|
"eval_steps_per_second": 1.618, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.29149293899536133, |
|
"learning_rate": 0.00010033244305469234, |
|
"loss": 0.1439, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.48990726470947266, |
|
"learning_rate": 0.00010016622175697898, |
|
"loss": 0.3245, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.8410664200782776, |
|
"learning_rate": 0.0001, |
|
"loss": 0.4141, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.23176737129688263, |
|
"learning_rate": 9.983377824302106e-05, |
|
"loss": 0.4396, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 0.37275928258895874, |
|
"learning_rate": 9.966755694530767e-05, |
|
"loss": 0.384, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.34968245029449463, |
|
"learning_rate": 9.950133656612421e-05, |
|
"loss": 0.3296, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.6505431532859802, |
|
"learning_rate": 9.933511756473244e-05, |
|
"loss": 0.1885, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 1.4213581085205078, |
|
"learning_rate": 9.916890040039031e-05, |
|
"loss": 0.4766, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 0.5529109835624695, |
|
"learning_rate": 9.900268553235076e-05, |
|
"loss": 0.2335, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.3431006371974945, |
|
"learning_rate": 9.883647341986032e-05, |
|
"loss": 0.4784, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.4143410623073578, |
|
"learning_rate": 9.867026452215792e-05, |
|
"loss": 0.6342, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.3886817395687103, |
|
"learning_rate": 9.850405929847366e-05, |
|
"loss": 0.1899, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.3255102336406708, |
|
"learning_rate": 9.833785820802739e-05, |
|
"loss": 0.1885, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.23849962651729584, |
|
"learning_rate": 9.817166171002765e-05, |
|
"loss": 0.2219, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.6556320190429688, |
|
"learning_rate": 9.800547026367022e-05, |
|
"loss": 0.2923, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.7272720336914062, |
|
"learning_rate": 9.783928432813688e-05, |
|
"loss": 0.5208, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.3971239924430847, |
|
"learning_rate": 9.767310436259438e-05, |
|
"loss": 0.2383, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.2979855537414551, |
|
"learning_rate": 9.750693082619273e-05, |
|
"loss": 0.1653, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.6237159967422485, |
|
"learning_rate": 9.734076417806428e-05, |
|
"loss": 0.0886, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.9377636909484863, |
|
"learning_rate": 9.717460487732245e-05, |
|
"loss": 0.2564, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.19992011785507202, |
|
"learning_rate": 9.700845338306018e-05, |
|
"loss": 0.0384, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.3822425603866577, |
|
"learning_rate": 9.68423101543489e-05, |
|
"loss": 0.1232, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.2883025109767914, |
|
"learning_rate": 9.667617565023735e-05, |
|
"loss": 0.2164, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 0.6428910493850708, |
|
"learning_rate": 9.651005032974994e-05, |
|
"loss": 0.1398, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.28312209248542786, |
|
"learning_rate": 9.634393465188578e-05, |
|
"loss": 0.11, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.8573331236839294, |
|
"learning_rate": 9.617782907561748e-05, |
|
"loss": 0.2709, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.24638248980045319, |
|
"learning_rate": 9.601173405988953e-05, |
|
"loss": 0.1721, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.2952326834201813, |
|
"learning_rate": 9.584565006361734e-05, |
|
"loss": 0.5673, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.417782723903656, |
|
"learning_rate": 9.567957754568596e-05, |
|
"loss": 0.4372, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.4788338840007782, |
|
"learning_rate": 9.551351696494854e-05, |
|
"loss": 0.1572, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.42218559980392456, |
|
"learning_rate": 9.534746878022534e-05, |
|
"loss": 0.3792, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.8672342896461487, |
|
"learning_rate": 9.518143345030246e-05, |
|
"loss": 0.589, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.3684612214565277, |
|
"learning_rate": 9.501541143393028e-05, |
|
"loss": 0.1279, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.533931314945221, |
|
"learning_rate": 9.48494031898226e-05, |
|
"loss": 0.1256, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.2679530084133148, |
|
"learning_rate": 9.468340917665508e-05, |
|
"loss": 0.1006, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.2058538943529129, |
|
"learning_rate": 9.451742985306398e-05, |
|
"loss": 0.0577, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.3021174669265747, |
|
"learning_rate": 9.435146567764515e-05, |
|
"loss": 0.2261, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"grad_norm": 0.6816737055778503, |
|
"learning_rate": 9.418551710895243e-05, |
|
"loss": 0.3723, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.4222981929779053, |
|
"learning_rate": 9.401958460549658e-05, |
|
"loss": 0.0325, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.16284583508968353, |
|
"learning_rate": 9.385366862574404e-05, |
|
"loss": 0.1287, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 1.3745120763778687, |
|
"learning_rate": 9.368776962811552e-05, |
|
"loss": 0.3241, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.36204075813293457, |
|
"learning_rate": 9.352188807098481e-05, |
|
"loss": 0.2495, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.19530382752418518, |
|
"learning_rate": 9.335602441267759e-05, |
|
"loss": 0.2201, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.2768145203590393, |
|
"learning_rate": 9.319017911147e-05, |
|
"loss": 0.2036, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.28405070304870605, |
|
"learning_rate": 9.302435262558747e-05, |
|
"loss": 0.4085, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 4.290142059326172, |
|
"learning_rate": 9.285854541320352e-05, |
|
"loss": 0.0938, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 1.0401676893234253, |
|
"learning_rate": 9.269275793243833e-05, |
|
"loss": 0.5443, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.6484922170639038, |
|
"learning_rate": 9.252699064135758e-05, |
|
"loss": 0.5032, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.3814872205257416, |
|
"learning_rate": 9.236124399797122e-05, |
|
"loss": 0.3484, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.5454087257385254, |
|
"learning_rate": 9.219551846023211e-05, |
|
"loss": 0.0768, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.7552561163902283, |
|
"learning_rate": 9.202981448603477e-05, |
|
"loss": 0.3679, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 1.8529096841812134, |
|
"learning_rate": 9.186413253321418e-05, |
|
"loss": 0.3955, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.18287956714630127, |
|
"learning_rate": 9.169847305954447e-05, |
|
"loss": 0.0373, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.302222341299057, |
|
"learning_rate": 9.153283652273768e-05, |
|
"loss": 0.3458, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.3268778324127197, |
|
"learning_rate": 9.136722338044243e-05, |
|
"loss": 0.0853, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.2930258512496948, |
|
"learning_rate": 9.120163409024271e-05, |
|
"loss": 0.1806, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 0.4745717942714691, |
|
"learning_rate": 9.103606910965666e-05, |
|
"loss": 0.204, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.45598700642585754, |
|
"learning_rate": 9.087052889613518e-05, |
|
"loss": 0.1858, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.23771706223487854, |
|
"learning_rate": 9.070501390706079e-05, |
|
"loss": 0.0799, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.6027151942253113, |
|
"learning_rate": 9.05395245997463e-05, |
|
"loss": 0.398, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.8044284582138062, |
|
"learning_rate": 9.037406143143356e-05, |
|
"loss": 0.1046, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.23316510021686554, |
|
"learning_rate": 9.020862485929219e-05, |
|
"loss": 0.2189, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.4902825355529785, |
|
"learning_rate": 9.004321534041835e-05, |
|
"loss": 0.2999, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.3512743413448334, |
|
"learning_rate": 8.987783333183344e-05, |
|
"loss": 0.0409, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.29709306359291077, |
|
"learning_rate": 8.971247929048283e-05, |
|
"loss": 0.19, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.6619227528572083, |
|
"learning_rate": 8.954715367323468e-05, |
|
"loss": 0.1516, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.5215200781822205, |
|
"learning_rate": 8.938185693687853e-05, |
|
"loss": 0.3649, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.28022971749305725, |
|
"learning_rate": 8.921658953812415e-05, |
|
"loss": 0.2562, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.41529372334480286, |
|
"learning_rate": 8.905135193360033e-05, |
|
"loss": 0.228, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.3765081465244293, |
|
"learning_rate": 8.888614457985341e-05, |
|
"loss": 0.079, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.6531975865364075, |
|
"learning_rate": 8.872096793334624e-05, |
|
"loss": 0.3251, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.3692251145839691, |
|
"learning_rate": 8.855582245045683e-05, |
|
"loss": 0.1668, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.4639212489128113, |
|
"learning_rate": 8.839070858747697e-05, |
|
"loss": 0.2352, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.5719773769378662, |
|
"learning_rate": 8.822562680061125e-05, |
|
"loss": 0.7878, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.2982478439807892, |
|
"learning_rate": 8.806057754597558e-05, |
|
"loss": 0.2832, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.2849707305431366, |
|
"learning_rate": 8.789556127959585e-05, |
|
"loss": 0.3516, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.16983425617218018, |
|
"learning_rate": 8.773057845740702e-05, |
|
"loss": 0.0755, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.7474312782287598, |
|
"learning_rate": 8.756562953525152e-05, |
|
"loss": 0.1744, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.44700291752815247, |
|
"learning_rate": 8.740071496887803e-05, |
|
"loss": 0.1424, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.5309647917747498, |
|
"learning_rate": 8.723583521394054e-05, |
|
"loss": 0.047, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.3638722002506256, |
|
"learning_rate": 8.70709907259967e-05, |
|
"loss": 0.4982, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 1.1174355745315552, |
|
"learning_rate": 8.690618196050666e-05, |
|
"loss": 0.197, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.3625263571739197, |
|
"learning_rate": 8.674140937283208e-05, |
|
"loss": 0.0991, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.883642315864563, |
|
"learning_rate": 8.657667341823448e-05, |
|
"loss": 0.451, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.32223352789878845, |
|
"learning_rate": 8.641197455187417e-05, |
|
"loss": 0.4483, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.39049550890922546, |
|
"learning_rate": 8.624731322880912e-05, |
|
"loss": 0.2865, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.886722981929779, |
|
"learning_rate": 8.608268990399349e-05, |
|
"loss": 0.4363, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.8225802183151245, |
|
"learning_rate": 8.591810503227635e-05, |
|
"loss": 0.0689, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.5393767356872559, |
|
"learning_rate": 8.575355906840072e-05, |
|
"loss": 0.2782, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.3159850835800171, |
|
"learning_rate": 8.558905246700201e-05, |
|
"loss": 0.4313, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.17237627506256104, |
|
"learning_rate": 8.542458568260682e-05, |
|
"loss": 0.0598, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.3424827456474304, |
|
"learning_rate": 8.526015916963191e-05, |
|
"loss": 0.0955, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.22092819213867188, |
|
"learning_rate": 8.509577338238255e-05, |
|
"loss": 0.1096, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.3480028212070465, |
|
"learning_rate": 8.49314287750517e-05, |
|
"loss": 0.499, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"grad_norm": 0.24549205601215363, |
|
"learning_rate": 8.476712580171838e-05, |
|
"loss": 0.2929, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.4028603136539459, |
|
"learning_rate": 8.460286491634663e-05, |
|
"loss": 0.3242, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.6208504438400269, |
|
"learning_rate": 8.443864657278428e-05, |
|
"loss": 0.4581, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.3889882266521454, |
|
"learning_rate": 8.427447122476148e-05, |
|
"loss": 0.4558, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.3713911771774292, |
|
"learning_rate": 8.411033932588967e-05, |
|
"loss": 0.5298, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.4244590401649475, |
|
"learning_rate": 8.394625132966025e-05, |
|
"loss": 0.2284, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.3483653664588928, |
|
"learning_rate": 8.378220768944327e-05, |
|
"loss": 0.3504, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 1.9254424571990967, |
|
"learning_rate": 8.361820885848624e-05, |
|
"loss": 0.2707, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.606145977973938, |
|
"learning_rate": 8.345425528991288e-05, |
|
"loss": 0.3264, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.5714905261993408, |
|
"learning_rate": 8.329034743672187e-05, |
|
"loss": 0.1881, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.8185067176818848, |
|
"learning_rate": 8.31264857517855e-05, |
|
"loss": 0.362, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.6066401600837708, |
|
"learning_rate": 8.296267068784862e-05, |
|
"loss": 0.3876, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.4317091107368469, |
|
"learning_rate": 8.279890269752715e-05, |
|
"loss": 0.4905, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.5521796941757202, |
|
"learning_rate": 8.263518223330697e-05, |
|
"loss": 0.1143, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 0.5201196670532227, |
|
"learning_rate": 8.247150974754275e-05, |
|
"loss": 0.2074, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.6514804363250732, |
|
"learning_rate": 8.230788569245648e-05, |
|
"loss": 0.1698, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.3170391917228699, |
|
"learning_rate": 8.214431052013634e-05, |
|
"loss": 0.1715, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.2641740143299103, |
|
"learning_rate": 8.198078468253557e-05, |
|
"loss": 0.2376, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.49258503317832947, |
|
"learning_rate": 8.181730863147093e-05, |
|
"loss": 0.8573, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 0.30264657735824585, |
|
"learning_rate": 8.165388281862178e-05, |
|
"loss": 0.1837, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.26163825392723083, |
|
"learning_rate": 8.149050769552856e-05, |
|
"loss": 0.1082, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.263866126537323, |
|
"learning_rate": 8.132718371359166e-05, |
|
"loss": 0.0358, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.3995482623577118, |
|
"learning_rate": 8.116391132407033e-05, |
|
"loss": 0.5514, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.39222079515457153, |
|
"learning_rate": 8.100069097808103e-05, |
|
"loss": 0.4839, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.2744399309158325, |
|
"learning_rate": 8.083752312659654e-05, |
|
"loss": 0.1999, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"eval_loss": 1.0414544343948364, |
|
"eval_runtime": 61.5922, |
|
"eval_samples_per_second": 1.624, |
|
"eval_steps_per_second": 1.624, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.2859790623188019, |
|
"learning_rate": 8.067440822044469e-05, |
|
"loss": 0.1046, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 1.1303648948669434, |
|
"learning_rate": 8.051134671030687e-05, |
|
"loss": 0.4458, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.7454203963279724, |
|
"learning_rate": 8.034833904671698e-05, |
|
"loss": 0.3143, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.2475353479385376, |
|
"learning_rate": 8.018538568006027e-05, |
|
"loss": 0.1534, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.41427668929100037, |
|
"learning_rate": 8.002248706057177e-05, |
|
"loss": 0.172, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.23500694334506989, |
|
"learning_rate": 7.985964363833532e-05, |
|
"loss": 0.0494, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.30438652634620667, |
|
"learning_rate": 7.96968558632824e-05, |
|
"loss": 0.1334, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.4113299548625946, |
|
"learning_rate": 7.953412418519052e-05, |
|
"loss": 0.3013, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.28593701124191284, |
|
"learning_rate": 7.937144905368226e-05, |
|
"loss": 0.1119, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.3217616081237793, |
|
"learning_rate": 7.920883091822408e-05, |
|
"loss": 0.3012, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.39298737049102783, |
|
"learning_rate": 7.904627022812483e-05, |
|
"loss": 0.5322, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.3343609869480133, |
|
"learning_rate": 7.888376743253463e-05, |
|
"loss": 0.2569, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.7963635325431824, |
|
"learning_rate": 7.872132298044382e-05, |
|
"loss": 0.282, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 0.4038046896457672, |
|
"learning_rate": 7.855893732068125e-05, |
|
"loss": 0.203, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.3325504958629608, |
|
"learning_rate": 7.839661090191362e-05, |
|
"loss": 0.353, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.46112361550331116, |
|
"learning_rate": 7.823434417264378e-05, |
|
"loss": 0.3071, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.36491167545318604, |
|
"learning_rate": 7.807213758120966e-05, |
|
"loss": 0.352, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.3772267997264862, |
|
"learning_rate": 7.790999157578314e-05, |
|
"loss": 0.3064, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.4712262749671936, |
|
"learning_rate": 7.774790660436858e-05, |
|
"loss": 0.2215, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.634762167930603, |
|
"learning_rate": 7.758588311480174e-05, |
|
"loss": 0.1912, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 1.0294770002365112, |
|
"learning_rate": 7.742392155474858e-05, |
|
"loss": 0.1456, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.3933314383029938, |
|
"learning_rate": 7.726202237170387e-05, |
|
"loss": 0.2846, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.30901187658309937, |
|
"learning_rate": 7.710018601299004e-05, |
|
"loss": 0.2486, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.30429914593696594, |
|
"learning_rate": 7.693841292575598e-05, |
|
"loss": 0.387, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.2635176181793213, |
|
"learning_rate": 7.677670355697577e-05, |
|
"loss": 0.1412, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.3051837086677551, |
|
"learning_rate": 7.661505835344732e-05, |
|
"loss": 0.1199, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.4841059744358063, |
|
"learning_rate": 7.645347776179144e-05, |
|
"loss": 0.1758, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.7497323155403137, |
|
"learning_rate": 7.629196222845026e-05, |
|
"loss": 0.2512, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.3562595546245575, |
|
"learning_rate": 7.613051219968623e-05, |
|
"loss": 0.2186, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 1.2094510793685913, |
|
"learning_rate": 7.596912812158083e-05, |
|
"loss": 0.2998, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.6023332476615906, |
|
"learning_rate": 7.580781044003324e-05, |
|
"loss": 0.1246, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.4656884968280792, |
|
"learning_rate": 7.564655960075927e-05, |
|
"loss": 0.2749, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"grad_norm": 0.6948531866073608, |
|
"learning_rate": 7.548537604929001e-05, |
|
"loss": 0.5442, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 1.315352201461792, |
|
"learning_rate": 7.532426023097063e-05, |
|
"loss": 0.4247, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.45906028151512146, |
|
"learning_rate": 7.516321259095921e-05, |
|
"loss": 0.1876, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.5653383135795593, |
|
"learning_rate": 7.500223357422536e-05, |
|
"loss": 0.1735, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.3202337920665741, |
|
"learning_rate": 7.484132362554915e-05, |
|
"loss": 0.3102, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.612182080745697, |
|
"learning_rate": 7.468048318951983e-05, |
|
"loss": 0.4846, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.3194877803325653, |
|
"learning_rate": 7.451971271053455e-05, |
|
"loss": 0.059, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.5662250518798828, |
|
"learning_rate": 7.435901263279716e-05, |
|
"loss": 0.0554, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.37810468673706055, |
|
"learning_rate": 7.419838340031708e-05, |
|
"loss": 0.2623, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 0.4857700765132904, |
|
"learning_rate": 7.403782545690787e-05, |
|
"loss": 0.3497, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.22125770151615143, |
|
"learning_rate": 7.387733924618617e-05, |
|
"loss": 0.035, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.3674735426902771, |
|
"learning_rate": 7.371692521157048e-05, |
|
"loss": 0.625, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.45555412769317627, |
|
"learning_rate": 7.35565837962798e-05, |
|
"loss": 0.4273, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.7296884059906006, |
|
"learning_rate": 7.339631544333249e-05, |
|
"loss": 0.3723, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.41084760427474976, |
|
"learning_rate": 7.323612059554513e-05, |
|
"loss": 0.1341, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.4270436465740204, |
|
"learning_rate": 7.307599969553111e-05, |
|
"loss": 0.4426, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.23851610720157623, |
|
"learning_rate": 7.291595318569951e-05, |
|
"loss": 0.3558, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.3444399833679199, |
|
"learning_rate": 7.275598150825393e-05, |
|
"loss": 0.3752, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.43362972140312195, |
|
"learning_rate": 7.25960851051912e-05, |
|
"loss": 0.1467, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 0.6192312240600586, |
|
"learning_rate": 7.243626441830009e-05, |
|
"loss": 0.376, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.5423337817192078, |
|
"learning_rate": 7.227651988916031e-05, |
|
"loss": 0.4163, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 1.0165411233901978, |
|
"learning_rate": 7.211685195914097e-05, |
|
"loss": 0.3563, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.544395923614502, |
|
"learning_rate": 7.195726106939974e-05, |
|
"loss": 0.3344, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.32629308104515076, |
|
"learning_rate": 7.179774766088126e-05, |
|
"loss": 0.3126, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.3347086012363434, |
|
"learning_rate": 7.163831217431615e-05, |
|
"loss": 0.3867, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.35171234607696533, |
|
"learning_rate": 7.14789550502198e-05, |
|
"loss": 0.3186, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.41637492179870605, |
|
"learning_rate": 7.131967672889101e-05, |
|
"loss": 0.308, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 0.3099021315574646, |
|
"learning_rate": 7.116047765041079e-05, |
|
"loss": 0.2962, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 1.0291274785995483, |
|
"learning_rate": 7.100135825464139e-05, |
|
"loss": 0.0858, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.3761727213859558, |
|
"learning_rate": 7.084231898122477e-05, |
|
"loss": 0.3224, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.21233822405338287, |
|
"learning_rate": 7.068336026958146e-05, |
|
"loss": 0.0349, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 1.1428797245025635, |
|
"learning_rate": 7.052448255890957e-05, |
|
"loss": 0.2133, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.44500914216041565, |
|
"learning_rate": 7.036568628818331e-05, |
|
"loss": 0.2843, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.4282483458518982, |
|
"learning_rate": 7.02069718961518e-05, |
|
"loss": 0.1139, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.3170280456542969, |
|
"learning_rate": 7.004833982133808e-05, |
|
"loss": 0.1759, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.54608553647995, |
|
"learning_rate": 6.988979050203768e-05, |
|
"loss": 0.32, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.40999898314476013, |
|
"learning_rate": 6.973132437631742e-05, |
|
"loss": 0.1294, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.5694934129714966, |
|
"learning_rate": 6.957294188201438e-05, |
|
"loss": 0.4025, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.19408564269542694, |
|
"learning_rate": 6.941464345673449e-05, |
|
"loss": 0.0711, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 1.0297935009002686, |
|
"learning_rate": 6.925642953785136e-05, |
|
"loss": 0.4554, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.6384499669075012, |
|
"learning_rate": 6.909830056250527e-05, |
|
"loss": 0.7755, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.35377582907676697, |
|
"learning_rate": 6.894025696760163e-05, |
|
"loss": 0.3368, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.6168931126594543, |
|
"learning_rate": 6.878229918981003e-05, |
|
"loss": 0.3632, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.4333438575267792, |
|
"learning_rate": 6.862442766556297e-05, |
|
"loss": 0.4481, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.3434096872806549, |
|
"learning_rate": 6.846664283105455e-05, |
|
"loss": 0.4594, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.8049106001853943, |
|
"learning_rate": 6.830894512223946e-05, |
|
"loss": 0.1239, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.21577104926109314, |
|
"learning_rate": 6.815133497483157e-05, |
|
"loss": 0.0594, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 0.3035653829574585, |
|
"learning_rate": 6.799381282430284e-05, |
|
"loss": 0.2847, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.346308171749115, |
|
"learning_rate": 6.783637910588216e-05, |
|
"loss": 0.4226, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.40908312797546387, |
|
"learning_rate": 6.767903425455401e-05, |
|
"loss": 0.5447, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.44740766286849976, |
|
"learning_rate": 6.752177870505736e-05, |
|
"loss": 0.1076, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.3309715986251831, |
|
"learning_rate": 6.736461289188445e-05, |
|
"loss": 0.1884, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.29329484701156616, |
|
"learning_rate": 6.720753724927958e-05, |
|
"loss": 0.3133, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.6964632272720337, |
|
"learning_rate": 6.705055221123788e-05, |
|
"loss": 0.5469, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.3863834738731384, |
|
"learning_rate": 6.68936582115042e-05, |
|
"loss": 0.4276, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.32202091813087463, |
|
"learning_rate": 6.673685568357182e-05, |
|
"loss": 0.2867, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.45666739344596863, |
|
"learning_rate": 6.658014506068126e-05, |
|
"loss": 0.3404, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 1.0935767889022827, |
|
"learning_rate": 6.642352677581917e-05, |
|
"loss": 0.2642, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.49452221393585205, |
|
"learning_rate": 6.626700126171702e-05, |
|
"loss": 0.1137, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.6080031394958496, |
|
"learning_rate": 6.611056895084998e-05, |
|
"loss": 0.2378, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.3335520327091217, |
|
"learning_rate": 6.595423027543571e-05, |
|
"loss": 0.191, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.8033477067947388, |
|
"learning_rate": 6.579798566743314e-05, |
|
"loss": 0.2892, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 2.931178331375122, |
|
"learning_rate": 6.56418355585413e-05, |
|
"loss": 0.4743, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.48405173420906067, |
|
"learning_rate": 6.548578038019815e-05, |
|
"loss": 0.2486, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.6350341439247131, |
|
"learning_rate": 6.532982056357928e-05, |
|
"loss": 0.3742, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.36276134848594666, |
|
"learning_rate": 6.517395653959694e-05, |
|
"loss": 0.2189, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 0.6255000233650208, |
|
"learning_rate": 6.501818873889855e-05, |
|
"loss": 0.2459, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.36843547224998474, |
|
"learning_rate": 6.486251759186572e-05, |
|
"loss": 0.3125, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.40621039271354675, |
|
"learning_rate": 6.470694352861312e-05, |
|
"loss": 0.2004, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.3194919526576996, |
|
"learning_rate": 6.455146697898703e-05, |
|
"loss": 0.3094, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.21008367836475372, |
|
"learning_rate": 6.439608837256432e-05, |
|
"loss": 0.0541, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.3361469507217407, |
|
"learning_rate": 6.424080813865138e-05, |
|
"loss": 0.5029, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.24147911369800568, |
|
"learning_rate": 6.408562670628266e-05, |
|
"loss": 0.1654, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.8166019916534424, |
|
"learning_rate": 6.393054450421963e-05, |
|
"loss": 0.3575, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 1.122574806213379, |
|
"learning_rate": 6.377556196094973e-05, |
|
"loss": 0.7164, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.7233923673629761, |
|
"learning_rate": 6.362067950468489e-05, |
|
"loss": 0.3521, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.41336318850517273, |
|
"learning_rate": 6.34658975633605e-05, |
|
"loss": 0.3212, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.8844769597053528, |
|
"learning_rate": 6.331121656463441e-05, |
|
"loss": 0.0649, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.44808632135391235, |
|
"learning_rate": 6.315663693588534e-05, |
|
"loss": 0.3333, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.3650015592575073, |
|
"learning_rate": 6.300215910421212e-05, |
|
"loss": 0.1375, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.4140397310256958, |
|
"learning_rate": 6.28477834964322e-05, |
|
"loss": 0.1505, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.2868010401725769, |
|
"learning_rate": 6.269351053908061e-05, |
|
"loss": 0.0466, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.3398640751838684, |
|
"learning_rate": 6.25393406584088e-05, |
|
"loss": 0.088, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.488488107919693, |
|
"learning_rate": 6.238527428038339e-05, |
|
"loss": 0.3337, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.7760544419288635, |
|
"learning_rate": 6.223131183068499e-05, |
|
"loss": 0.5087, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.48542800545692444, |
|
"learning_rate": 6.207745373470716e-05, |
|
"loss": 0.122, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.21850326657295227, |
|
"learning_rate": 6.192370041755505e-05, |
|
"loss": 0.1532, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"eval_loss": 1.0776242017745972, |
|
"eval_runtime": 61.9441, |
|
"eval_samples_per_second": 1.614, |
|
"eval_steps_per_second": 1.614, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.4770156443119049, |
|
"learning_rate": 6.177005230404431e-05, |
|
"loss": 0.3974, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 1.0366460084915161, |
|
"learning_rate": 6.161650981869998e-05, |
|
"loss": 0.2319, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.3871290683746338, |
|
"learning_rate": 6.146307338575519e-05, |
|
"loss": 0.4389, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.44873878359794617, |
|
"learning_rate": 6.130974342915005e-05, |
|
"loss": 0.2469, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.32535475492477417, |
|
"learning_rate": 6.115652037253053e-05, |
|
"loss": 0.239, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3552875220775604, |
|
"learning_rate": 6.100340463924723e-05, |
|
"loss": 0.2276, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.45674723386764526, |
|
"learning_rate": 6.0850396652354125e-05, |
|
"loss": 0.3405, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.29195958375930786, |
|
"learning_rate": 6.069749683460765e-05, |
|
"loss": 0.4767, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.3897278308868408, |
|
"learning_rate": 6.0544705608465234e-05, |
|
"loss": 0.2277, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.250284880399704, |
|
"learning_rate": 6.039202339608432e-05, |
|
"loss": 0.2529, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.2508828043937683, |
|
"learning_rate": 6.023945061932119e-05, |
|
"loss": 0.0292, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.29530829191207886, |
|
"learning_rate": 6.008698769972967e-05, |
|
"loss": 0.1958, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.18633241951465607, |
|
"learning_rate": 5.9934635058560154e-05, |
|
"loss": 0.0424, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.7199178338050842, |
|
"learning_rate": 5.978239311675826e-05, |
|
"loss": 0.3123, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.5743251442909241, |
|
"learning_rate": 5.963026229496378e-05, |
|
"loss": 0.18, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 2.2675962448120117, |
|
"learning_rate": 5.9478243013509505e-05, |
|
"loss": 0.324, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.24627915024757385, |
|
"learning_rate": 5.9326335692419995e-05, |
|
"loss": 0.1563, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.5204538702964783, |
|
"learning_rate": 5.9174540751410487e-05, |
|
"loss": 0.1695, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 1.0078840255737305, |
|
"learning_rate": 5.902285860988576e-05, |
|
"loss": 0.2403, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.8918837904930115, |
|
"learning_rate": 5.887128968693887e-05, |
|
"loss": 0.2705, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.5288832783699036, |
|
"learning_rate": 5.871983440135005e-05, |
|
"loss": 0.3406, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.7802530527114868, |
|
"learning_rate": 5.856849317158563e-05, |
|
"loss": 0.4431, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.829023540019989, |
|
"learning_rate": 5.8417266415796745e-05, |
|
"loss": 0.1579, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.2933697998523712, |
|
"learning_rate": 5.8266154551818216e-05, |
|
"loss": 0.292, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 1.175118327140808, |
|
"learning_rate": 5.8115157997167536e-05, |
|
"loss": 0.3135, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.26484665274620056, |
|
"learning_rate": 5.796427716904347e-05, |
|
"loss": 0.0947, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.290715754032135, |
|
"learning_rate": 5.7813512484325095e-05, |
|
"loss": 0.3051, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.7810901403427124, |
|
"learning_rate": 5.7662864359570624e-05, |
|
"loss": 0.6841, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.5301364660263062, |
|
"learning_rate": 5.751233321101617e-05, |
|
"loss": 0.2107, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.3347890079021454, |
|
"learning_rate": 5.736191945457463e-05, |
|
"loss": 0.2544, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.34437447786331177, |
|
"learning_rate": 5.72116235058346e-05, |
|
"loss": 0.3537, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 1.3457096815109253, |
|
"learning_rate": 5.7061445780059074e-05, |
|
"loss": 0.462, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.32543686032295227, |
|
"learning_rate": 5.69113866921846e-05, |
|
"loss": 0.0387, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.38246065378189087, |
|
"learning_rate": 5.676144665681974e-05, |
|
"loss": 0.3505, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.35182633996009827, |
|
"learning_rate": 5.6611626088244194e-05, |
|
"loss": 0.2105, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.4109022617340088, |
|
"learning_rate": 5.6461925400407576e-05, |
|
"loss": 0.3537, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 0.3107440173625946, |
|
"learning_rate": 5.631234500692828e-05, |
|
"loss": 0.1429, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.4284103512763977, |
|
"learning_rate": 5.616288532109225e-05, |
|
"loss": 0.2854, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.32733407616615295, |
|
"learning_rate": 5.601354675585209e-05, |
|
"loss": 0.2061, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 1.2653546333312988, |
|
"learning_rate": 5.58643297238256e-05, |
|
"loss": 0.2411, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.3521691560745239, |
|
"learning_rate": 5.571523463729487e-05, |
|
"loss": 0.2751, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.5209056735038757, |
|
"learning_rate": 5.5566261908204966e-05, |
|
"loss": 0.3047, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.9035019278526306, |
|
"learning_rate": 5.541741194816299e-05, |
|
"loss": 0.3731, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.34125107526779175, |
|
"learning_rate": 5.526868516843673e-05, |
|
"loss": 0.2955, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.36946702003479004, |
|
"learning_rate": 5.5120081979953785e-05, |
|
"loss": 0.1661, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.3534347414970398, |
|
"learning_rate": 5.497160279330014e-05, |
|
"loss": 0.4519, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.2882026135921478, |
|
"learning_rate": 5.482324801871919e-05, |
|
"loss": 0.5972, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.7276318669319153, |
|
"learning_rate": 5.467501806611062e-05, |
|
"loss": 0.2326, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.23008614778518677, |
|
"learning_rate": 5.452691334502922e-05, |
|
"loss": 0.2549, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.23662728071212769, |
|
"learning_rate": 5.43789342646837e-05, |
|
"loss": 0.0845, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 1.2522404193878174, |
|
"learning_rate": 5.423108123393581e-05, |
|
"loss": 0.4237, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.2206917107105255, |
|
"learning_rate": 5.4083354661298814e-05, |
|
"loss": 0.1515, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.3296979069709778, |
|
"learning_rate": 5.393575495493679e-05, |
|
"loss": 0.2217, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.6194382905960083, |
|
"learning_rate": 5.378828252266308e-05, |
|
"loss": 0.3087, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.1941995471715927, |
|
"learning_rate": 5.3640937771939436e-05, |
|
"loss": 0.0136, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.3312687277793884, |
|
"learning_rate": 5.349372110987496e-05, |
|
"loss": 0.2721, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.40799418091773987, |
|
"learning_rate": 5.3346632943224695e-05, |
|
"loss": 0.492, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 1.0968602895736694, |
|
"learning_rate": 5.3199673678388685e-05, |
|
"loss": 0.3102, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.5079402327537537, |
|
"learning_rate": 5.305284372141095e-05, |
|
"loss": 0.361, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 1.0647687911987305, |
|
"learning_rate": 5.290614347797802e-05, |
|
"loss": 0.3519, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.9241762161254883, |
|
"learning_rate": 5.275957335341814e-05, |
|
"loss": 0.5112, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.20941267907619476, |
|
"learning_rate": 5.261313375270014e-05, |
|
"loss": 0.0871, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.3228456676006317, |
|
"learning_rate": 5.246682508043206e-05, |
|
"loss": 0.2729, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.3993156850337982, |
|
"learning_rate": 5.232064774086022e-05, |
|
"loss": 0.2958, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 2.0890214443206787, |
|
"learning_rate": 5.217460213786821e-05, |
|
"loss": 0.2181, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.35725289583206177, |
|
"learning_rate": 5.2028688674975415e-05, |
|
"loss": 0.4336, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.22372879087924957, |
|
"learning_rate": 5.188290775533624e-05, |
|
"loss": 0.2441, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.38314974308013916, |
|
"learning_rate": 5.1737259781738936e-05, |
|
"loss": 0.3979, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.3898037075996399, |
|
"learning_rate": 5.159174515660432e-05, |
|
"loss": 0.2707, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.5374806523323059, |
|
"learning_rate": 5.1446364281984774e-05, |
|
"loss": 0.2368, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.45632028579711914, |
|
"learning_rate": 5.130111755956327e-05, |
|
"loss": 0.4022, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.635205090045929, |
|
"learning_rate": 5.115600539065197e-05, |
|
"loss": 0.286, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.30840864777565, |
|
"learning_rate": 5.101102817619131e-05, |
|
"loss": 0.1724, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.3157990276813507, |
|
"learning_rate": 5.086618631674888e-05, |
|
"loss": 0.1023, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.6306793093681335, |
|
"learning_rate": 5.072148021251821e-05, |
|
"loss": 0.3076, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.6977675557136536, |
|
"learning_rate": 5.057691026331792e-05, |
|
"loss": 0.3178, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.6471707224845886, |
|
"learning_rate": 5.043247686859024e-05, |
|
"loss": 0.3459, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.28806617856025696, |
|
"learning_rate": 5.02881804274002e-05, |
|
"loss": 0.2028, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.3172370195388794, |
|
"learning_rate": 5.014402133843443e-05, |
|
"loss": 0.2296, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.48926007747650146, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.1085, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.41491979360580444, |
|
"learning_rate": 4.9856116810023465e-05, |
|
"loss": 0.1794, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.5332885980606079, |
|
"learning_rate": 4.971237216604967e-05, |
|
"loss": 0.3493, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.19919279217720032, |
|
"learning_rate": 4.956876646524059e-05, |
|
"loss": 0.0328, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.29925766587257385, |
|
"learning_rate": 4.942530010437435e-05, |
|
"loss": 0.2714, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"grad_norm": 0.576008677482605, |
|
"learning_rate": 4.92819734798441e-05, |
|
"loss": 0.2385, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.5098739862442017, |
|
"learning_rate": 4.913878698765686e-05, |
|
"loss": 0.1016, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.4307500123977661, |
|
"learning_rate": 4.899574102343247e-05, |
|
"loss": 0.2031, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 1.0047227144241333, |
|
"learning_rate": 4.885283598240259e-05, |
|
"loss": 0.3458, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 1.3505383729934692, |
|
"learning_rate": 4.87100722594094e-05, |
|
"loss": 0.4995, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.4602445960044861, |
|
"learning_rate": 4.856745024890466e-05, |
|
"loss": 0.223, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.5431117415428162, |
|
"learning_rate": 4.842497034494859e-05, |
|
"loss": 0.1288, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.25237104296684265, |
|
"learning_rate": 4.8282632941208725e-05, |
|
"loss": 0.2603, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.6471454501152039, |
|
"learning_rate": 4.814043843095902e-05, |
|
"loss": 0.2201, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.3411142826080322, |
|
"learning_rate": 4.799838720707846e-05, |
|
"loss": 0.4188, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.41913872957229614, |
|
"learning_rate": 4.78564796620502e-05, |
|
"loss": 0.3233, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.4495401084423065, |
|
"learning_rate": 4.771471618796043e-05, |
|
"loss": 0.1504, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.3317515254020691, |
|
"learning_rate": 4.757309717649723e-05, |
|
"loss": 0.2903, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.5954946279525757, |
|
"learning_rate": 4.743162301894952e-05, |
|
"loss": 0.3404, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 0.4872910678386688, |
|
"learning_rate": 4.729029410620615e-05, |
|
"loss": 0.4495, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.23028811812400818, |
|
"learning_rate": 4.7149110828754464e-05, |
|
"loss": 0.278, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.3303956389427185, |
|
"learning_rate": 4.700807357667952e-05, |
|
"loss": 0.1051, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.35193932056427, |
|
"learning_rate": 4.686718273966291e-05, |
|
"loss": 0.2968, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.2598091959953308, |
|
"learning_rate": 4.6726438706981644e-05, |
|
"loss": 0.0954, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.39768552780151367, |
|
"learning_rate": 4.658584186750713e-05, |
|
"loss": 0.0541, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.4370318055152893, |
|
"learning_rate": 4.644539260970416e-05, |
|
"loss": 0.4754, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.5983580350875854, |
|
"learning_rate": 4.6305091321629666e-05, |
|
"loss": 0.0712, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.4080672264099121, |
|
"learning_rate": 4.616493839093179e-05, |
|
"loss": 0.1945, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.3521052300930023, |
|
"learning_rate": 4.6024934204848745e-05, |
|
"loss": 0.191, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.8113175630569458, |
|
"learning_rate": 4.5885079150207776e-05, |
|
"loss": 0.5054, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.899402916431427, |
|
"learning_rate": 4.574537361342407e-05, |
|
"loss": 0.2557, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.8417747020721436, |
|
"learning_rate": 4.560581798049976e-05, |
|
"loss": 0.203, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 1.2963255643844604, |
|
"learning_rate": 4.5466412637022704e-05, |
|
"loss": 0.5346, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.3572009801864624, |
|
"learning_rate": 4.532715796816564e-05, |
|
"loss": 0.2946, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.6167850494384766, |
|
"learning_rate": 4.518805435868492e-05, |
|
"loss": 0.1102, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.2798488736152649, |
|
"learning_rate": 4.50491021929194e-05, |
|
"loss": 0.1322, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.2718496322631836, |
|
"learning_rate": 4.491030185478976e-05, |
|
"loss": 0.0642, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.8502088785171509, |
|
"learning_rate": 4.4771653727797e-05, |
|
"loss": 0.298, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.5349703431129456, |
|
"learning_rate": 4.4633158195021594e-05, |
|
"loss": 0.3594, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.25136202573776245, |
|
"learning_rate": 4.449481563912251e-05, |
|
"loss": 0.0455, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"eval_loss": 1.0920302867889404, |
|
"eval_runtime": 61.5845, |
|
"eval_samples_per_second": 1.624, |
|
"eval_steps_per_second": 1.624, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.7967007160186768, |
|
"learning_rate": 4.435662644233594e-05, |
|
"loss": 0.1578, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.3636241555213928, |
|
"learning_rate": 4.421859098647427e-05, |
|
"loss": 0.2332, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.3658440411090851, |
|
"learning_rate": 4.4080709652925336e-05, |
|
"loss": 0.4128, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 0.4984844923019409, |
|
"learning_rate": 4.394298282265095e-05, |
|
"loss": 0.1836, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.3186807930469513, |
|
"learning_rate": 4.380541087618606e-05, |
|
"loss": 0.0893, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.6703417897224426, |
|
"learning_rate": 4.3667994193637796e-05, |
|
"loss": 0.0759, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.4573896825313568, |
|
"learning_rate": 4.3530733154684164e-05, |
|
"loss": 0.1458, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.4950135052204132, |
|
"learning_rate": 4.339362813857321e-05, |
|
"loss": 0.3033, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.24762655794620514, |
|
"learning_rate": 4.3256679524121834e-05, |
|
"loss": 0.0559, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.3558330237865448, |
|
"learning_rate": 4.3119887689714844e-05, |
|
"loss": 0.1862, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.48134902119636536, |
|
"learning_rate": 4.298325301330383e-05, |
|
"loss": 0.5581, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.22999516129493713, |
|
"learning_rate": 4.2846775872406256e-05, |
|
"loss": 0.1114, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.4079764485359192, |
|
"learning_rate": 4.27104566441042e-05, |
|
"loss": 0.3144, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.3731940686702728, |
|
"learning_rate": 4.257429570504352e-05, |
|
"loss": 0.2183, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.6186441779136658, |
|
"learning_rate": 4.2438293431432665e-05, |
|
"loss": 0.0373, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.18545430898666382, |
|
"learning_rate": 4.23024501990417e-05, |
|
"loss": 0.0479, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.39455363154411316, |
|
"learning_rate": 4.216676638320135e-05, |
|
"loss": 0.4562, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 1.193217158317566, |
|
"learning_rate": 4.203124235880178e-05, |
|
"loss": 0.1534, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.34430086612701416, |
|
"learning_rate": 4.189587850029169e-05, |
|
"loss": 0.3745, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.4106227159500122, |
|
"learning_rate": 4.176067518167723e-05, |
|
"loss": 0.7812, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.5523481369018555, |
|
"learning_rate": 4.1625632776521037e-05, |
|
"loss": 0.3704, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 0.31600770354270935, |
|
"learning_rate": 4.149075165794105e-05, |
|
"loss": 0.209, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"grad_norm": 1.4721559286117554, |
|
"learning_rate": 4.1356032198609706e-05, |
|
"loss": 0.3099, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 1.2828038930892944, |
|
"learning_rate": 4.12214747707527e-05, |
|
"loss": 0.5042, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.5226539373397827, |
|
"learning_rate": 4.108707974614804e-05, |
|
"loss": 0.5119, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.48910781741142273, |
|
"learning_rate": 4.095284749612503e-05, |
|
"loss": 0.3925, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.3297548294067383, |
|
"learning_rate": 4.081877839156325e-05, |
|
"loss": 0.2415, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.34490349888801575, |
|
"learning_rate": 4.068487280289146e-05, |
|
"loss": 0.3771, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.33798182010650635, |
|
"learning_rate": 4.0551131100086745e-05, |
|
"loss": 0.1344, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.5897701382637024, |
|
"learning_rate": 4.041755365267324e-05, |
|
"loss": 0.3901, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.29294416308403015, |
|
"learning_rate": 4.028414082972141e-05, |
|
"loss": 0.169, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.5478091835975647, |
|
"learning_rate": 4.0150892999846656e-05, |
|
"loss": 0.2538, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.8328322172164917, |
|
"learning_rate": 4.001781053120863e-05, |
|
"loss": 0.3077, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 1.1498992443084717, |
|
"learning_rate": 3.988489379151016e-05, |
|
"loss": 0.6369, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.2736966907978058, |
|
"learning_rate": 3.975214314799607e-05, |
|
"loss": 0.1637, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.6017499566078186, |
|
"learning_rate": 3.961955896745224e-05, |
|
"loss": 0.5734, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.4597730338573456, |
|
"learning_rate": 3.94871416162048e-05, |
|
"loss": 0.6337, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.5489735007286072, |
|
"learning_rate": 3.935489146011869e-05, |
|
"loss": 0.2923, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.6476238965988159, |
|
"learning_rate": 3.9222808864597004e-05, |
|
"loss": 0.4118, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.48180490732192993, |
|
"learning_rate": 3.909089419457996e-05, |
|
"loss": 0.1466, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.25918513536453247, |
|
"learning_rate": 3.89591478145437e-05, |
|
"loss": 0.3142, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.6687664985656738, |
|
"learning_rate": 3.8827570088499356e-05, |
|
"loss": 0.1736, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.3094065189361572, |
|
"learning_rate": 3.8696161379992225e-05, |
|
"loss": 0.1969, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.21293707191944122, |
|
"learning_rate": 3.856492205210043e-05, |
|
"loss": 0.0834, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.5569763779640198, |
|
"learning_rate": 3.843385246743417e-05, |
|
"loss": 0.4948, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.24184778332710266, |
|
"learning_rate": 3.8302952988134756e-05, |
|
"loss": 0.1577, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.29768210649490356, |
|
"learning_rate": 3.817222397587336e-05, |
|
"loss": 0.1199, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.8061155080795288, |
|
"learning_rate": 3.804166579185018e-05, |
|
"loss": 0.5931, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.4685113728046417, |
|
"learning_rate": 3.7911278796793516e-05, |
|
"loss": 0.1634, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.3703949451446533, |
|
"learning_rate": 3.778106335095859e-05, |
|
"loss": 0.1887, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.5604259967803955, |
|
"learning_rate": 3.7651019814126654e-05, |
|
"loss": 0.187, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.3002054989337921, |
|
"learning_rate": 3.7521148545604e-05, |
|
"loss": 0.3549, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.3863959014415741, |
|
"learning_rate": 3.739144990422089e-05, |
|
"loss": 0.0768, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.6352940201759338, |
|
"learning_rate": 3.726192424833075e-05, |
|
"loss": 0.1575, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.2791038453578949, |
|
"learning_rate": 3.7132571935808924e-05, |
|
"loss": 0.3065, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.8539896607398987, |
|
"learning_rate": 3.7003393324051874e-05, |
|
"loss": 0.4698, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.5503263473510742, |
|
"learning_rate": 3.687438876997612e-05, |
|
"loss": 0.4123, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.2520607113838196, |
|
"learning_rate": 3.674555863001725e-05, |
|
"loss": 0.1486, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.5313445329666138, |
|
"learning_rate": 3.661690326012897e-05, |
|
"loss": 0.2031, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.5100160241127014, |
|
"learning_rate": 3.6488423015782125e-05, |
|
"loss": 0.1415, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.3034670054912567, |
|
"learning_rate": 3.6360118251963645e-05, |
|
"loss": 0.5914, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.9517300724983215, |
|
"learning_rate": 3.623198932317566e-05, |
|
"loss": 0.3926, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.23673593997955322, |
|
"learning_rate": 3.610403658343443e-05, |
|
"loss": 0.2653, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.3499943017959595, |
|
"learning_rate": 3.597626038626942e-05, |
|
"loss": 0.5564, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.5328202843666077, |
|
"learning_rate": 3.58486610847223e-05, |
|
"loss": 0.2586, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.32311174273490906, |
|
"learning_rate": 3.5721239031346066e-05, |
|
"loss": 0.1838, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.17368283867835999, |
|
"learning_rate": 3.5593994578203896e-05, |
|
"loss": 0.2052, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.23117925226688385, |
|
"learning_rate": 3.546692807686829e-05, |
|
"loss": 0.0167, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.33799508213996887, |
|
"learning_rate": 3.534003987842005e-05, |
|
"loss": 0.1975, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.24989762902259827, |
|
"learning_rate": 3.5213330333447346e-05, |
|
"loss": 0.019, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.5386832356452942, |
|
"learning_rate": 3.508679979204481e-05, |
|
"loss": 0.2893, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.9841434955596924, |
|
"learning_rate": 3.496044860381238e-05, |
|
"loss": 0.5701, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.13189686834812164, |
|
"learning_rate": 3.483427711785449e-05, |
|
"loss": 0.0177, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.43873071670532227, |
|
"learning_rate": 3.4708285682779076e-05, |
|
"loss": 0.2277, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.5309714078903198, |
|
"learning_rate": 3.458247464669657e-05, |
|
"loss": 0.4126, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.2741256356239319, |
|
"learning_rate": 3.445684435721897e-05, |
|
"loss": 0.3022, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.3837210237979889, |
|
"learning_rate": 3.4331395161458955e-05, |
|
"loss": 0.2618, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.3155161440372467, |
|
"learning_rate": 3.4206127406028745e-05, |
|
"loss": 0.2366, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.1386852264404297, |
|
"learning_rate": 3.408104143703929e-05, |
|
"loss": 0.0333, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"grad_norm": 0.2975553572177887, |
|
"learning_rate": 3.395613760009925e-05, |
|
"loss": 0.0488, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.46444180607795715, |
|
"learning_rate": 3.383141624031408e-05, |
|
"loss": 0.1012, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.475568950176239, |
|
"learning_rate": 3.3706877702285036e-05, |
|
"loss": 0.3058, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.3736167252063751, |
|
"learning_rate": 3.35825223301083e-05, |
|
"loss": 0.0624, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.35307514667510986, |
|
"learning_rate": 3.345835046737391e-05, |
|
"loss": 0.4223, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.977942943572998, |
|
"learning_rate": 3.333436245716488e-05, |
|
"loss": 0.2905, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.43840837478637695, |
|
"learning_rate": 3.3210558642056275e-05, |
|
"loss": 0.1153, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.7894395589828491, |
|
"learning_rate": 3.308693936411421e-05, |
|
"loss": 0.3199, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 1.0266164541244507, |
|
"learning_rate": 3.29635049648949e-05, |
|
"loss": 0.2558, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 0.45427221059799194, |
|
"learning_rate": 3.2840255785443855e-05, |
|
"loss": 0.1216, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.422296404838562, |
|
"learning_rate": 3.271719216629468e-05, |
|
"loss": 0.2097, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.5444633364677429, |
|
"learning_rate": 3.259431444746846e-05, |
|
"loss": 0.3705, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.26783403754234314, |
|
"learning_rate": 3.247162296847249e-05, |
|
"loss": 0.1156, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 1.415783166885376, |
|
"learning_rate": 3.234911806829948e-05, |
|
"loss": 0.1728, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.4155539274215698, |
|
"learning_rate": 3.222680008542678e-05, |
|
"loss": 0.5061, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.5668177008628845, |
|
"learning_rate": 3.210466935781516e-05, |
|
"loss": 0.3171, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.41083770990371704, |
|
"learning_rate": 3.198272622290804e-05, |
|
"loss": 0.3899, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.4589744210243225, |
|
"learning_rate": 3.1860971017630604e-05, |
|
"loss": 0.2183, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.33844196796417236, |
|
"learning_rate": 3.173940407838871e-05, |
|
"loss": 0.2511, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.4281659424304962, |
|
"learning_rate": 3.161802574106799e-05, |
|
"loss": 0.4536, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.5119270086288452, |
|
"learning_rate": 3.149683634103312e-05, |
|
"loss": 0.6213, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.3323350250720978, |
|
"learning_rate": 3.137583621312665e-05, |
|
"loss": 0.4426, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.3763193190097809, |
|
"learning_rate": 3.1255025691668185e-05, |
|
"loss": 0.3041, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.41001826524734497, |
|
"learning_rate": 3.1134405110453515e-05, |
|
"loss": 0.4205, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.4732879400253296, |
|
"learning_rate": 3.101397480275359e-05, |
|
"loss": 0.2774, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.36144134402275085, |
|
"learning_rate": 3.089373510131354e-05, |
|
"loss": 0.2278, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.39741820096969604, |
|
"learning_rate": 3.077368633835205e-05, |
|
"loss": 0.2114, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.5269529223442078, |
|
"learning_rate": 3.065382884556012e-05, |
|
"loss": 0.5934, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.3559035360813141, |
|
"learning_rate": 3.053416295410026e-05, |
|
"loss": 0.3411, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.5966904163360596, |
|
"learning_rate": 3.0414688994605723e-05, |
|
"loss": 0.1953, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.28111594915390015, |
|
"learning_rate": 3.0295407297179325e-05, |
|
"loss": 0.3585, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.4853847622871399, |
|
"learning_rate": 3.0176318191392726e-05, |
|
"loss": 0.5636, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.6061645150184631, |
|
"learning_rate": 3.005742200628545e-05, |
|
"loss": 0.1, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.28980934619903564, |
|
"learning_rate": 2.9938719070363952e-05, |
|
"loss": 0.383, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.34016743302345276, |
|
"learning_rate": 2.9820209711600854e-05, |
|
"loss": 0.2209, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.26997146010398865, |
|
"learning_rate": 2.9701894257433826e-05, |
|
"loss": 0.2524, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.29038554430007935, |
|
"learning_rate": 2.9583773034764826e-05, |
|
"loss": 0.3877, |
|
"step": 1425 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1900, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 475, |
|
"total_flos": 1.0468132032282624e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |