{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 14.908943089430894,
"eval_steps": 500,
"global_step": 1140,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.013008130081300813,
"grad_norm": 230.88125753772232,
"learning_rate": 5.714285714285715e-07,
"loss": 8.6242,
"step": 1
},
{
"epoch": 0.026016260162601626,
"grad_norm": 208.76444315978463,
"learning_rate": 1.142857142857143e-06,
"loss": 8.5102,
"step": 2
},
{
"epoch": 0.03902439024390244,
"grad_norm": 216.97693342125092,
"learning_rate": 1.7142857142857145e-06,
"loss": 8.6752,
"step": 3
},
{
"epoch": 0.05203252032520325,
"grad_norm": 231.45591088243566,
"learning_rate": 2.285714285714286e-06,
"loss": 8.4936,
"step": 4
},
{
"epoch": 0.06504065040650407,
"grad_norm": 212.77968160229423,
"learning_rate": 2.8571428571428573e-06,
"loss": 7.6862,
"step": 5
},
{
"epoch": 0.07804878048780488,
"grad_norm": 156.7176569348902,
"learning_rate": 3.428571428571429e-06,
"loss": 5.9776,
"step": 6
},
{
"epoch": 0.0910569105691057,
"grad_norm": 90.12791665931498,
"learning_rate": 4.000000000000001e-06,
"loss": 3.2091,
"step": 7
},
{
"epoch": 0.1040650406504065,
"grad_norm": 61.965766767519,
"learning_rate": 4.571428571428572e-06,
"loss": 2.3428,
"step": 8
},
{
"epoch": 0.11707317073170732,
"grad_norm": 47.54431569154033,
"learning_rate": 5.142857142857142e-06,
"loss": 1.7836,
"step": 9
},
{
"epoch": 0.13008130081300814,
"grad_norm": 47.20331806396116,
"learning_rate": 5.7142857142857145e-06,
"loss": 1.4851,
"step": 10
},
{
"epoch": 0.14308943089430895,
"grad_norm": 38.36570648710507,
"learning_rate": 6.285714285714286e-06,
"loss": 1.3206,
"step": 11
},
{
"epoch": 0.15609756097560976,
"grad_norm": 29.066038353919424,
"learning_rate": 6.857142857142858e-06,
"loss": 1.1916,
"step": 12
},
{
"epoch": 0.16910569105691056,
"grad_norm": 21.722668900948843,
"learning_rate": 7.428571428571429e-06,
"loss": 1.1014,
"step": 13
},
{
"epoch": 0.1821138211382114,
"grad_norm": 15.87425801890939,
"learning_rate": 8.000000000000001e-06,
"loss": 0.8121,
"step": 14
},
{
"epoch": 0.1951219512195122,
"grad_norm": 17.587447371642845,
"learning_rate": 8.571428571428571e-06,
"loss": 0.9999,
"step": 15
},
{
"epoch": 0.208130081300813,
"grad_norm": 16.23353351483177,
"learning_rate": 9.142857142857144e-06,
"loss": 0.8833,
"step": 16
},
{
"epoch": 0.22113821138211381,
"grad_norm": 15.487688413211991,
"learning_rate": 9.714285714285715e-06,
"loss": 0.5773,
"step": 17
},
{
"epoch": 0.23414634146341465,
"grad_norm": 13.566620977135004,
"learning_rate": 1.0285714285714285e-05,
"loss": 0.7757,
"step": 18
},
{
"epoch": 0.24715447154471545,
"grad_norm": 12.462190186073848,
"learning_rate": 1.0857142857142858e-05,
"loss": 0.6909,
"step": 19
},
{
"epoch": 0.2601626016260163,
"grad_norm": 9.863692920491205,
"learning_rate": 1.1428571428571429e-05,
"loss": 0.5492,
"step": 20
},
{
"epoch": 0.2731707317073171,
"grad_norm": 15.820830571242366,
"learning_rate": 1.2e-05,
"loss": 0.6604,
"step": 21
},
{
"epoch": 0.2861788617886179,
"grad_norm": 12.483935396645652,
"learning_rate": 1.2571428571428572e-05,
"loss": 0.6353,
"step": 22
},
{
"epoch": 0.2991869918699187,
"grad_norm": 13.167505155488156,
"learning_rate": 1.3142857142857145e-05,
"loss": 0.6198,
"step": 23
},
{
"epoch": 0.3121951219512195,
"grad_norm": 11.312617034012124,
"learning_rate": 1.3714285714285716e-05,
"loss": 0.6247,
"step": 24
},
{
"epoch": 0.3252032520325203,
"grad_norm": 10.440846014107416,
"learning_rate": 1.4285714285714287e-05,
"loss": 0.5274,
"step": 25
},
{
"epoch": 0.3382113821138211,
"grad_norm": 13.199338633996815,
"learning_rate": 1.4857142857142858e-05,
"loss": 0.6209,
"step": 26
},
{
"epoch": 0.35121951219512193,
"grad_norm": 12.032299778483855,
"learning_rate": 1.542857142857143e-05,
"loss": 0.653,
"step": 27
},
{
"epoch": 0.3642276422764228,
"grad_norm": 11.866587650490903,
"learning_rate": 1.6000000000000003e-05,
"loss": 0.4805,
"step": 28
},
{
"epoch": 0.3772357723577236,
"grad_norm": 8.933036188530423,
"learning_rate": 1.6571428571428574e-05,
"loss": 0.4935,
"step": 29
},
{
"epoch": 0.3902439024390244,
"grad_norm": 10.436307527405338,
"learning_rate": 1.7142857142857142e-05,
"loss": 0.5817,
"step": 30
},
{
"epoch": 0.4032520325203252,
"grad_norm": 9.297670799220597,
"learning_rate": 1.7714285714285717e-05,
"loss": 0.4342,
"step": 31
},
{
"epoch": 0.416260162601626,
"grad_norm": 7.600678151452253,
"learning_rate": 1.8285714285714288e-05,
"loss": 0.4984,
"step": 32
},
{
"epoch": 0.4292682926829268,
"grad_norm": 8.286210274367773,
"learning_rate": 1.885714285714286e-05,
"loss": 0.4809,
"step": 33
},
{
"epoch": 0.44227642276422763,
"grad_norm": 8.654757939781744,
"learning_rate": 1.942857142857143e-05,
"loss": 0.5042,
"step": 34
},
{
"epoch": 0.45528455284552843,
"grad_norm": 7.70000564697565,
"learning_rate": 2e-05,
"loss": 0.4869,
"step": 35
},
{
"epoch": 0.4682926829268293,
"grad_norm": 7.2151873058037115,
"learning_rate": 1.999995958478429e-05,
"loss": 0.4446,
"step": 36
},
{
"epoch": 0.4813008130081301,
"grad_norm": 6.722907147890219,
"learning_rate": 1.999983833946383e-05,
"loss": 0.4187,
"step": 37
},
{
"epoch": 0.4943089430894309,
"grad_norm": 6.534701787629057,
"learning_rate": 1.9999636265018655e-05,
"loss": 0.4235,
"step": 38
},
{
"epoch": 0.5073170731707317,
"grad_norm": 8.837751955851171,
"learning_rate": 1.999935336308214e-05,
"loss": 0.5159,
"step": 39
},
{
"epoch": 0.5203252032520326,
"grad_norm": 8.3598460515532,
"learning_rate": 1.9998989635940996e-05,
"loss": 0.459,
"step": 40
},
{
"epoch": 0.5333333333333333,
"grad_norm": 5.861736968755028,
"learning_rate": 1.9998545086535242e-05,
"loss": 0.3528,
"step": 41
},
{
"epoch": 0.5463414634146342,
"grad_norm": 7.139988271289857,
"learning_rate": 1.9998019718458194e-05,
"loss": 0.4082,
"step": 42
},
{
"epoch": 0.5593495934959349,
"grad_norm": 7.909067319149876,
"learning_rate": 1.999741353595642e-05,
"loss": 0.4493,
"step": 43
},
{
"epoch": 0.5723577235772358,
"grad_norm": 8.541027166094969,
"learning_rate": 1.9996726543929717e-05,
"loss": 0.4543,
"step": 44
},
{
"epoch": 0.5853658536585366,
"grad_norm": 6.376818167843216,
"learning_rate": 1.9995958747931083e-05,
"loss": 0.4319,
"step": 45
},
{
"epoch": 0.5983739837398374,
"grad_norm": 7.666803532320634,
"learning_rate": 1.9995110154166636e-05,
"loss": 0.4639,
"step": 46
},
{
"epoch": 0.6113821138211382,
"grad_norm": 5.366689030683648,
"learning_rate": 1.99941807694956e-05,
"loss": 0.3108,
"step": 47
},
{
"epoch": 0.624390243902439,
"grad_norm": 4.76561069358933,
"learning_rate": 1.9993170601430233e-05,
"loss": 0.3488,
"step": 48
},
{
"epoch": 0.6373983739837399,
"grad_norm": 5.579353151449992,
"learning_rate": 1.9992079658135757e-05,
"loss": 0.3292,
"step": 49
},
{
"epoch": 0.6504065040650406,
"grad_norm": 4.529381400281467,
"learning_rate": 1.9990907948430327e-05,
"loss": 0.3233,
"step": 50
},
{
"epoch": 0.6634146341463415,
"grad_norm": 4.89310755054693,
"learning_rate": 1.9989655481784917e-05,
"loss": 0.2976,
"step": 51
},
{
"epoch": 0.6764227642276422,
"grad_norm": 6.094851323707289,
"learning_rate": 1.998832226832327e-05,
"loss": 0.3427,
"step": 52
},
{
"epoch": 0.6894308943089431,
"grad_norm": 7.519057655874433,
"learning_rate": 1.9986908318821804e-05,
"loss": 0.4168,
"step": 53
},
{
"epoch": 0.7024390243902439,
"grad_norm": 6.734374113916692,
"learning_rate": 1.998541364470954e-05,
"loss": 0.3456,
"step": 54
},
{
"epoch": 0.7154471544715447,
"grad_norm": 5.611175619279291,
"learning_rate": 1.998383825806799e-05,
"loss": 0.3057,
"step": 55
},
{
"epoch": 0.7284552845528456,
"grad_norm": 5.415014977209956,
"learning_rate": 1.9982182171631065e-05,
"loss": 0.3841,
"step": 56
},
{
"epoch": 0.7414634146341463,
"grad_norm": 6.040329603282613,
"learning_rate": 1.9980445398784998e-05,
"loss": 0.3914,
"step": 57
},
{
"epoch": 0.7544715447154472,
"grad_norm": 5.620902759656132,
"learning_rate": 1.9978627953568187e-05,
"loss": 0.3412,
"step": 58
},
{
"epoch": 0.767479674796748,
"grad_norm": 7.154464466166392,
"learning_rate": 1.997672985067113e-05,
"loss": 0.3879,
"step": 59
},
{
"epoch": 0.7804878048780488,
"grad_norm": 6.832499040469465,
"learning_rate": 1.9974751105436266e-05,
"loss": 0.4399,
"step": 60
},
{
"epoch": 0.7934959349593496,
"grad_norm": 5.90878066056323,
"learning_rate": 1.997269173385788e-05,
"loss": 0.3527,
"step": 61
},
{
"epoch": 0.8065040650406504,
"grad_norm": 6.0530816990814,
"learning_rate": 1.9970551752581964e-05,
"loss": 0.4588,
"step": 62
},
{
"epoch": 0.8195121951219512,
"grad_norm": 5.527433101647836,
"learning_rate": 1.9968331178906082e-05,
"loss": 0.385,
"step": 63
},
{
"epoch": 0.832520325203252,
"grad_norm": 4.3592020276526915,
"learning_rate": 1.9966030030779216e-05,
"loss": 0.2998,
"step": 64
},
{
"epoch": 0.8455284552845529,
"grad_norm": 5.254802387533948,
"learning_rate": 1.9963648326801653e-05,
"loss": 0.3382,
"step": 65
},
{
"epoch": 0.8585365853658536,
"grad_norm": 9.281183098955887,
"learning_rate": 1.996118608622481e-05,
"loss": 0.394,
"step": 66
},
{
"epoch": 0.8715447154471545,
"grad_norm": 8.38170471119074,
"learning_rate": 1.9958643328951083e-05,
"loss": 0.3701,
"step": 67
},
{
"epoch": 0.8845528455284553,
"grad_norm": 6.313134696492434,
"learning_rate": 1.9956020075533683e-05,
"loss": 0.4177,
"step": 68
},
{
"epoch": 0.8975609756097561,
"grad_norm": 4.348932383253668,
"learning_rate": 1.995331634717649e-05,
"loss": 0.2945,
"step": 69
},
{
"epoch": 0.9105691056910569,
"grad_norm": 4.433107884805326,
"learning_rate": 1.9950532165733847e-05,
"loss": 0.3633,
"step": 70
},
{
"epoch": 0.9235772357723577,
"grad_norm": 5.362504384821112,
"learning_rate": 1.994766755371042e-05,
"loss": 0.2932,
"step": 71
},
{
"epoch": 0.9365853658536586,
"grad_norm": 5.581345628544194,
"learning_rate": 1.994472253426099e-05,
"loss": 0.3256,
"step": 72
},
{
"epoch": 0.9495934959349593,
"grad_norm": 5.942912969150186,
"learning_rate": 1.9941697131190273e-05,
"loss": 0.396,
"step": 73
},
{
"epoch": 0.9626016260162602,
"grad_norm": 6.177454994439193,
"learning_rate": 1.993859136895274e-05,
"loss": 0.4033,
"step": 74
},
{
"epoch": 0.975609756097561,
"grad_norm": 5.3956839647398045,
"learning_rate": 1.993540527265239e-05,
"loss": 0.2666,
"step": 75
},
{
"epoch": 0.9886178861788618,
"grad_norm": 5.614670334975762,
"learning_rate": 1.993213886804259e-05,
"loss": 0.2943,
"step": 76
},
{
"epoch": 1.0016260162601627,
"grad_norm": 4.36195352952533,
"learning_rate": 1.9928792181525818e-05,
"loss": 0.2482,
"step": 77
},
{
"epoch": 1.0146341463414634,
"grad_norm": 4.4213065815641635,
"learning_rate": 1.992536524015349e-05,
"loss": 0.2827,
"step": 78
},
{
"epoch": 1.0276422764227642,
"grad_norm": 3.990647245954731,
"learning_rate": 1.992185807162572e-05,
"loss": 0.2666,
"step": 79
},
{
"epoch": 1.040650406504065,
"grad_norm": 3.861722635891494,
"learning_rate": 1.9918270704291104e-05,
"loss": 0.1926,
"step": 80
},
{
"epoch": 1.053658536585366,
"grad_norm": 5.300012815448601,
"learning_rate": 1.9914603167146488e-05,
"loss": 0.2653,
"step": 81
},
{
"epoch": 1.0666666666666667,
"grad_norm": 5.892127749110154,
"learning_rate": 1.9910855489836734e-05,
"loss": 0.2561,
"step": 82
},
{
"epoch": 1.0796747967479674,
"grad_norm": 5.396249404462623,
"learning_rate": 1.9907027702654472e-05,
"loss": 0.2834,
"step": 83
},
{
"epoch": 1.0926829268292684,
"grad_norm": 4.64475319206961,
"learning_rate": 1.9903119836539877e-05,
"loss": 0.2191,
"step": 84
},
{
"epoch": 1.1056910569105691,
"grad_norm": 6.605780905277876,
"learning_rate": 1.98991319230804e-05,
"loss": 0.2772,
"step": 85
},
{
"epoch": 1.1186991869918699,
"grad_norm": 5.909098612060078,
"learning_rate": 1.9895063994510512e-05,
"loss": 0.2477,
"step": 86
},
{
"epoch": 1.1317073170731708,
"grad_norm": 5.365729884394164,
"learning_rate": 1.9890916083711463e-05,
"loss": 0.2456,
"step": 87
},
{
"epoch": 1.1447154471544716,
"grad_norm": 4.018252159668762,
"learning_rate": 1.9886688224210988e-05,
"loss": 0.1861,
"step": 88
},
{
"epoch": 1.1577235772357723,
"grad_norm": 4.853996625324599,
"learning_rate": 1.988238045018306e-05,
"loss": 0.2517,
"step": 89
},
{
"epoch": 1.170731707317073,
"grad_norm": 6.643416305208953,
"learning_rate": 1.9877992796447604e-05,
"loss": 0.2827,
"step": 90
},
{
"epoch": 1.183739837398374,
"grad_norm": 6.1422339106616635,
"learning_rate": 1.987352529847021e-05,
"loss": 0.3496,
"step": 91
},
{
"epoch": 1.1967479674796748,
"grad_norm": 6.13749677489359,
"learning_rate": 1.9868977992361866e-05,
"loss": 0.3071,
"step": 92
},
{
"epoch": 1.2097560975609756,
"grad_norm": 5.168323351119836,
"learning_rate": 1.9864350914878635e-05,
"loss": 0.2334,
"step": 93
},
{
"epoch": 1.2227642276422763,
"grad_norm": 4.1938765456615,
"learning_rate": 1.9859644103421384e-05,
"loss": 0.2995,
"step": 94
},
{
"epoch": 1.2357723577235773,
"grad_norm": 5.211177152960159,
"learning_rate": 1.9854857596035476e-05,
"loss": 0.292,
"step": 95
},
{
"epoch": 1.248780487804878,
"grad_norm": 4.518477406407479,
"learning_rate": 1.984999143141046e-05,
"loss": 0.2276,
"step": 96
},
{
"epoch": 1.2617886178861788,
"grad_norm": 5.486829663924403,
"learning_rate": 1.9845045648879747e-05,
"loss": 0.2806,
"step": 97
},
{
"epoch": 1.2747967479674798,
"grad_norm": 5.215692596964664,
"learning_rate": 1.9840020288420314e-05,
"loss": 0.2663,
"step": 98
},
{
"epoch": 1.2878048780487805,
"grad_norm": 5.105134623598349,
"learning_rate": 1.983491539065237e-05,
"loss": 0.3274,
"step": 99
},
{
"epoch": 1.3008130081300813,
"grad_norm": 4.417060162706909,
"learning_rate": 1.982973099683902e-05,
"loss": 0.2896,
"step": 100
},
{
"epoch": 1.3138211382113822,
"grad_norm": 3.9117691328961506,
"learning_rate": 1.9824467148885942e-05,
"loss": 0.2262,
"step": 101
},
{
"epoch": 1.326829268292683,
"grad_norm": 4.648561917684093,
"learning_rate": 1.981912388934105e-05,
"loss": 0.2347,
"step": 102
},
{
"epoch": 1.3398373983739837,
"grad_norm": 5.317602564983213,
"learning_rate": 1.9813701261394136e-05,
"loss": 0.3124,
"step": 103
},
{
"epoch": 1.3528455284552845,
"grad_norm": 4.364016353059765,
"learning_rate": 1.9808199308876543e-05,
"loss": 0.2752,
"step": 104
},
{
"epoch": 1.3658536585365852,
"grad_norm": 4.99818282544751,
"learning_rate": 1.9802618076260784e-05,
"loss": 0.2777,
"step": 105
},
{
"epoch": 1.3788617886178862,
"grad_norm": 4.483511669693501,
"learning_rate": 1.9796957608660203e-05,
"loss": 0.2571,
"step": 106
},
{
"epoch": 1.391869918699187,
"grad_norm": 3.9121651483634716,
"learning_rate": 1.9791217951828607e-05,
"loss": 0.2554,
"step": 107
},
{
"epoch": 1.4048780487804877,
"grad_norm": 4.124381091041692,
"learning_rate": 1.978539915215989e-05,
"loss": 0.3098,
"step": 108
},
{
"epoch": 1.4178861788617887,
"grad_norm": 4.833188929280863,
"learning_rate": 1.9779501256687658e-05,
"loss": 0.2949,
"step": 109
},
{
"epoch": 1.4308943089430894,
"grad_norm": 4.278489655548645,
"learning_rate": 1.9773524313084857e-05,
"loss": 0.2002,
"step": 110
},
{
"epoch": 1.4439024390243902,
"grad_norm": 6.2723343004180725,
"learning_rate": 1.9767468369663382e-05,
"loss": 0.3199,
"step": 111
},
{
"epoch": 1.4569105691056912,
"grad_norm": 4.348567811446221,
"learning_rate": 1.9761333475373677e-05,
"loss": 0.246,
"step": 112
},
{
"epoch": 1.469918699186992,
"grad_norm": 3.7868561454272203,
"learning_rate": 1.975511967980437e-05,
"loss": 0.2748,
"step": 113
},
{
"epoch": 1.4829268292682927,
"grad_norm": 5.143565989659963,
"learning_rate": 1.9748827033181825e-05,
"loss": 0.2796,
"step": 114
},
{
"epoch": 1.4959349593495934,
"grad_norm": 3.9447419922701115,
"learning_rate": 1.9742455586369786e-05,
"loss": 0.22,
"step": 115
},
{
"epoch": 1.5089430894308942,
"grad_norm": 4.588912104539782,
"learning_rate": 1.9736005390868923e-05,
"loss": 0.2845,
"step": 116
},
{
"epoch": 1.5219512195121951,
"grad_norm": 4.319670446630065,
"learning_rate": 1.9729476498816455e-05,
"loss": 0.203,
"step": 117
},
{
"epoch": 1.534959349593496,
"grad_norm": 3.971807551499835,
"learning_rate": 1.9722868962985693e-05,
"loss": 0.2594,
"step": 118
},
{
"epoch": 1.5479674796747966,
"grad_norm": 3.9363850698183422,
"learning_rate": 1.971618283678563e-05,
"loss": 0.2365,
"step": 119
},
{
"epoch": 1.5609756097560976,
"grad_norm": 4.511257223022539,
"learning_rate": 1.9709418174260523e-05,
"loss": 0.2491,
"step": 120
},
{
"epoch": 1.5739837398373984,
"grad_norm": 4.886072844335961,
"learning_rate": 1.970257503008942e-05,
"loss": 0.239,
"step": 121
},
{
"epoch": 1.5869918699186991,
"grad_norm": 4.675630782649018,
"learning_rate": 1.969565345958576e-05,
"loss": 0.3074,
"step": 122
},
{
"epoch": 1.6,
"grad_norm": 5.006731738820173,
"learning_rate": 1.9688653518696886e-05,
"loss": 0.3073,
"step": 123
},
{
"epoch": 1.6130081300813008,
"grad_norm": 4.303983792791088,
"learning_rate": 1.9681575264003635e-05,
"loss": 0.2118,
"step": 124
},
{
"epoch": 1.6260162601626016,
"grad_norm": 4.448918927453546,
"learning_rate": 1.9674418752719835e-05,
"loss": 0.2438,
"step": 125
},
{
"epoch": 1.6390243902439026,
"grad_norm": 5.131140117465671,
"learning_rate": 1.9667184042691877e-05,
"loss": 0.2298,
"step": 126
},
{
"epoch": 1.652032520325203,
"grad_norm": 3.2399581338003407,
"learning_rate": 1.9659871192398237e-05,
"loss": 0.1416,
"step": 127
},
{
"epoch": 1.665040650406504,
"grad_norm": 3.511401536131484,
"learning_rate": 1.9652480260948995e-05,
"loss": 0.1798,
"step": 128
},
{
"epoch": 1.678048780487805,
"grad_norm": 4.63237984345691,
"learning_rate": 1.9645011308085374e-05,
"loss": 0.2026,
"step": 129
},
{
"epoch": 1.6910569105691056,
"grad_norm": 4.875330162486382,
"learning_rate": 1.963746439417924e-05,
"loss": 0.2993,
"step": 130
},
{
"epoch": 1.7040650406504065,
"grad_norm": 3.486729559236034,
"learning_rate": 1.9629839580232625e-05,
"loss": 0.1391,
"step": 131
},
{
"epoch": 1.7170731707317073,
"grad_norm": 4.272470459292228,
"learning_rate": 1.9622136927877226e-05,
"loss": 0.2911,
"step": 132
},
{
"epoch": 1.730081300813008,
"grad_norm": 4.004111381617907,
"learning_rate": 1.9614356499373918e-05,
"loss": 0.2615,
"step": 133
},
{
"epoch": 1.743089430894309,
"grad_norm": 3.870300097881223,
"learning_rate": 1.9606498357612236e-05,
"loss": 0.2086,
"step": 134
},
{
"epoch": 1.7560975609756098,
"grad_norm": 3.6842754269925213,
"learning_rate": 1.959856256610988e-05,
"loss": 0.2352,
"step": 135
},
{
"epoch": 1.7691056910569105,
"grad_norm": 4.992457315291344,
"learning_rate": 1.95905491890122e-05,
"loss": 0.2737,
"step": 136
},
{
"epoch": 1.7821138211382115,
"grad_norm": 3.6899274911801654,
"learning_rate": 1.9582458291091664e-05,
"loss": 0.174,
"step": 137
},
{
"epoch": 1.7951219512195122,
"grad_norm": 4.050726740720838,
"learning_rate": 1.9574289937747347e-05,
"loss": 0.2454,
"step": 138
},
{
"epoch": 1.808130081300813,
"grad_norm": 3.543669223885536,
"learning_rate": 1.956604419500441e-05,
"loss": 0.1782,
"step": 139
},
{
"epoch": 1.821138211382114,
"grad_norm": 3.920644016977587,
"learning_rate": 1.9557721129513538e-05,
"loss": 0.2294,
"step": 140
},
{
"epoch": 1.8341463414634145,
"grad_norm": 4.439148624281575,
"learning_rate": 1.9549320808550435e-05,
"loss": 0.2925,
"step": 141
},
{
"epoch": 1.8471544715447155,
"grad_norm": 4.439161121958125,
"learning_rate": 1.9540843300015253e-05,
"loss": 0.2422,
"step": 142
},
{
"epoch": 1.8601626016260162,
"grad_norm": 5.760691734688802,
"learning_rate": 1.953228867243206e-05,
"loss": 0.2448,
"step": 143
},
{
"epoch": 1.873170731707317,
"grad_norm": 4.152636368976296,
"learning_rate": 1.9523656994948285e-05,
"loss": 0.2391,
"step": 144
},
{
"epoch": 1.886178861788618,
"grad_norm": 3.5715039772686494,
"learning_rate": 1.9514948337334144e-05,
"loss": 0.1995,
"step": 145
},
{
"epoch": 1.8991869918699187,
"grad_norm": 4.445231140655837,
"learning_rate": 1.950616276998209e-05,
"loss": 0.2827,
"step": 146
},
{
"epoch": 1.9121951219512194,
"grad_norm": 3.5242260861609047,
"learning_rate": 1.9497300363906253e-05,
"loss": 0.2037,
"step": 147
},
{
"epoch": 1.9252032520325204,
"grad_norm": 4.556876251890979,
"learning_rate": 1.9488361190741836e-05,
"loss": 0.2261,
"step": 148
},
{
"epoch": 1.9382113821138212,
"grad_norm": 4.176486612051313,
"learning_rate": 1.947934532274456e-05,
"loss": 0.2941,
"step": 149
},
{
"epoch": 1.951219512195122,
"grad_norm": 4.155824009528282,
"learning_rate": 1.947025283279008e-05,
"loss": 0.2782,
"step": 150
},
{
"epoch": 1.9642276422764229,
"grad_norm": 4.283716648269924,
"learning_rate": 1.946108379437338e-05,
"loss": 0.2707,
"step": 151
},
{
"epoch": 1.9772357723577236,
"grad_norm": 3.748554130938304,
"learning_rate": 1.94518382816082e-05,
"loss": 0.2401,
"step": 152
},
{
"epoch": 1.9902439024390244,
"grad_norm": 4.366040799270219,
"learning_rate": 1.9442516369226408e-05,
"loss": 0.2643,
"step": 153
},
{
"epoch": 2.0032520325203254,
"grad_norm": 3.5008784661458128,
"learning_rate": 1.9433118132577432e-05,
"loss": 0.2126,
"step": 154
},
{
"epoch": 2.016260162601626,
"grad_norm": 3.8928484247693182,
"learning_rate": 1.9423643647627625e-05,
"loss": 0.1804,
"step": 155
},
{
"epoch": 2.029268292682927,
"grad_norm": 3.2309992540447543,
"learning_rate": 1.9414092990959653e-05,
"loss": 0.1824,
"step": 156
},
{
"epoch": 2.042276422764228,
"grad_norm": 2.8004486850259758,
"learning_rate": 1.9404466239771887e-05,
"loss": 0.1376,
"step": 157
},
{
"epoch": 2.0552845528455284,
"grad_norm": 4.04090570287216,
"learning_rate": 1.9394763471877774e-05,
"loss": 0.2145,
"step": 158
},
{
"epoch": 2.0682926829268293,
"grad_norm": 4.247108451612536,
"learning_rate": 1.9384984765705202e-05,
"loss": 0.2189,
"step": 159
},
{
"epoch": 2.08130081300813,
"grad_norm": 4.17448093795093,
"learning_rate": 1.937513020029588e-05,
"loss": 0.2356,
"step": 160
},
{
"epoch": 2.094308943089431,
"grad_norm": 3.2247564577083403,
"learning_rate": 1.936519985530468e-05,
"loss": 0.1848,
"step": 161
},
{
"epoch": 2.107317073170732,
"grad_norm": 3.6746992074200038,
"learning_rate": 1.9355193810999015e-05,
"loss": 0.2142,
"step": 162
},
{
"epoch": 2.1203252032520323,
"grad_norm": 3.535692174455577,
"learning_rate": 1.934511214825817e-05,
"loss": 0.176,
"step": 163
},
{
"epoch": 2.1333333333333333,
"grad_norm": 3.724515733913512,
"learning_rate": 1.9334954948572656e-05,
"loss": 0.2109,
"step": 164
},
{
"epoch": 2.1463414634146343,
"grad_norm": 4.149262557760461,
"learning_rate": 1.932472229404356e-05,
"loss": 0.1744,
"step": 165
},
{
"epoch": 2.159349593495935,
"grad_norm": 4.4628517154906095,
"learning_rate": 1.931441426738187e-05,
"loss": 0.2687,
"step": 166
},
{
"epoch": 2.1723577235772358,
"grad_norm": 4.0932795934488215,
"learning_rate": 1.930403095190781e-05,
"loss": 0.2271,
"step": 167
},
{
"epoch": 2.1853658536585368,
"grad_norm": 2.947126269214458,
"learning_rate": 1.9293572431550166e-05,
"loss": 0.1473,
"step": 168
},
{
"epoch": 2.1983739837398373,
"grad_norm": 3.5196607854273143,
"learning_rate": 1.9283038790845612e-05,
"loss": 0.1968,
"step": 169
},
{
"epoch": 2.2113821138211383,
"grad_norm": 3.8823015570591446,
"learning_rate": 1.9272430114938018e-05,
"loss": 0.1652,
"step": 170
},
{
"epoch": 2.2243902439024392,
"grad_norm": 3.548545585661667,
"learning_rate": 1.9261746489577767e-05,
"loss": 0.1511,
"step": 171
},
{
"epoch": 2.2373983739837398,
"grad_norm": 4.02995884194372,
"learning_rate": 1.9250988001121068e-05,
"loss": 0.1896,
"step": 172
},
{
"epoch": 2.2504065040650407,
"grad_norm": 3.517215015548888,
"learning_rate": 1.9240154736529242e-05,
"loss": 0.1959,
"step": 173
},
{
"epoch": 2.2634146341463417,
"grad_norm": 4.035966445241826,
"learning_rate": 1.922924678336804e-05,
"loss": 0.2133,
"step": 174
},
{
"epoch": 2.2764227642276422,
"grad_norm": 3.8693491335089663,
"learning_rate": 1.9218264229806917e-05,
"loss": 0.2202,
"step": 175
},
{
"epoch": 2.289430894308943,
"grad_norm": 3.833616161618607,
"learning_rate": 1.9207207164618323e-05,
"loss": 0.155,
"step": 176
},
{
"epoch": 2.3024390243902437,
"grad_norm": 3.4283946520122477,
"learning_rate": 1.9196075677177e-05,
"loss": 0.1617,
"step": 177
},
{
"epoch": 2.3154471544715447,
"grad_norm": 3.311159228202799,
"learning_rate": 1.9184869857459233e-05,
"loss": 0.2036,
"step": 178
},
{
"epoch": 2.3284552845528457,
"grad_norm": 3.4656577408210056,
"learning_rate": 1.917358979604215e-05,
"loss": 0.1962,
"step": 179
},
{
"epoch": 2.341463414634146,
"grad_norm": 3.996270083581475,
"learning_rate": 1.9162235584102973e-05,
"loss": 0.1621,
"step": 180
},
{
"epoch": 2.354471544715447,
"grad_norm": 2.861575747203407,
"learning_rate": 1.9150807313418293e-05,
"loss": 0.1435,
"step": 181
},
{
"epoch": 2.367479674796748,
"grad_norm": 3.87366838202817,
"learning_rate": 1.9139305076363305e-05,
"loss": 0.2592,
"step": 182
},
{
"epoch": 2.3804878048780487,
"grad_norm": 3.8471663369504565,
"learning_rate": 1.9127728965911094e-05,
"loss": 0.1688,
"step": 183
},
{
"epoch": 2.3934959349593496,
"grad_norm": 3.1610128734552982,
"learning_rate": 1.911607907563186e-05,
"loss": 0.1633,
"step": 184
},
{
"epoch": 2.40650406504065,
"grad_norm": 3.6510959997562575,
"learning_rate": 1.9104355499692166e-05,
"loss": 0.1751,
"step": 185
},
{
"epoch": 2.419512195121951,
"grad_norm": 2.9027919461413942,
"learning_rate": 1.9092558332854186e-05,
"loss": 0.1615,
"step": 186
},
{
"epoch": 2.432520325203252,
"grad_norm": 3.5763801032437277,
"learning_rate": 1.9080687670474923e-05,
"loss": 0.1901,
"step": 187
},
{
"epoch": 2.4455284552845526,
"grad_norm": 4.437211684445719,
"learning_rate": 1.9068743608505454e-05,
"loss": 0.185,
"step": 188
},
{
"epoch": 2.4585365853658536,
"grad_norm": 2.5057412891368114,
"learning_rate": 1.9056726243490152e-05,
"loss": 0.1432,
"step": 189
},
{
"epoch": 2.4715447154471546,
"grad_norm": 3.4687500624153844,
"learning_rate": 1.9044635672565898e-05,
"loss": 0.1741,
"step": 190
},
{
"epoch": 2.484552845528455,
"grad_norm": 3.6619841695079915,
"learning_rate": 1.903247199346129e-05,
"loss": 0.1753,
"step": 191
},
{
"epoch": 2.497560975609756,
"grad_norm": 3.1520059206227033,
"learning_rate": 1.9020235304495877e-05,
"loss": 0.1365,
"step": 192
},
{
"epoch": 2.510569105691057,
"grad_norm": 3.673031351652811,
"learning_rate": 1.9007925704579346e-05,
"loss": 0.2016,
"step": 193
},
{
"epoch": 2.5235772357723576,
"grad_norm": 3.46021385752599,
"learning_rate": 1.8995543293210713e-05,
"loss": 0.1416,
"step": 194
},
{
"epoch": 2.5365853658536586,
"grad_norm": 3.9573065236715084,
"learning_rate": 1.8983088170477556e-05,
"loss": 0.1802,
"step": 195
},
{
"epoch": 2.5495934959349595,
"grad_norm": 4.484576176480101,
"learning_rate": 1.8970560437055162e-05,
"loss": 0.2086,
"step": 196
},
{
"epoch": 2.56260162601626,
"grad_norm": 3.648134049882013,
"learning_rate": 1.8957960194205743e-05,
"loss": 0.1653,
"step": 197
},
{
"epoch": 2.575609756097561,
"grad_norm": 3.958334883444762,
"learning_rate": 1.894528754377761e-05,
"loss": 0.1994,
"step": 198
},
{
"epoch": 2.588617886178862,
"grad_norm": 3.941838601560344,
"learning_rate": 1.8932542588204334e-05,
"loss": 0.2253,
"step": 199
},
{
"epoch": 2.6016260162601625,
"grad_norm": 4.3162453501502895,
"learning_rate": 1.8919725430503946e-05,
"loss": 0.1819,
"step": 200
},
{
"epoch": 2.6146341463414635,
"grad_norm": 3.3609249443688687,
"learning_rate": 1.8906836174278088e-05,
"loss": 0.171,
"step": 201
},
{
"epoch": 2.6276422764227645,
"grad_norm": 3.0099472226700112,
"learning_rate": 1.8893874923711165e-05,
"loss": 0.1484,
"step": 202
},
{
"epoch": 2.640650406504065,
"grad_norm": 3.6588365810612298,
"learning_rate": 1.888084178356953e-05,
"loss": 0.1793,
"step": 203
},
{
"epoch": 2.653658536585366,
"grad_norm": 3.8668614415295464,
"learning_rate": 1.886773685920062e-05,
"loss": 0.2062,
"step": 204
},
{
"epoch": 2.6666666666666665,
"grad_norm": 2.6895178372540047,
"learning_rate": 1.8854560256532098e-05,
"loss": 0.1569,
"step": 205
},
{
"epoch": 2.6796747967479675,
"grad_norm": 3.4719914098671247,
"learning_rate": 1.884131208207102e-05,
"loss": 0.1918,
"step": 206
},
{
"epoch": 2.692682926829268,
"grad_norm": 2.7584556034576804,
"learning_rate": 1.882799244290294e-05,
"loss": 0.147,
"step": 207
},
{
"epoch": 2.705691056910569,
"grad_norm": 3.568599204142701,
"learning_rate": 1.881460144669109e-05,
"loss": 0.2527,
"step": 208
},
{
"epoch": 2.71869918699187,
"grad_norm": 3.5199678305621487,
"learning_rate": 1.8801139201675457e-05,
"loss": 0.2078,
"step": 209
},
{
"epoch": 2.7317073170731705,
"grad_norm": 4.5434445365694724,
"learning_rate": 1.8787605816671956e-05,
"loss": 0.2356,
"step": 210
},
{
"epoch": 2.7447154471544715,
"grad_norm": 3.9127153096216984,
"learning_rate": 1.8774001401071516e-05,
"loss": 0.2082,
"step": 211
},
{
"epoch": 2.7577235772357724,
"grad_norm": 2.96128278616753,
"learning_rate": 1.8760326064839222e-05,
"loss": 0.134,
"step": 212
},
{
"epoch": 2.770731707317073,
"grad_norm": 3.0389228925328586,
"learning_rate": 1.8746579918513404e-05,
"loss": 0.1674,
"step": 213
},
{
"epoch": 2.783739837398374,
"grad_norm": 4.139309382085306,
"learning_rate": 1.8732763073204755e-05,
"loss": 0.2547,
"step": 214
},
{
"epoch": 2.796747967479675,
"grad_norm": 2.769960672266414,
"learning_rate": 1.8718875640595432e-05,
"loss": 0.1527,
"step": 215
},
{
"epoch": 2.8097560975609754,
"grad_norm": 3.072439635949312,
"learning_rate": 1.8704917732938152e-05,
"loss": 0.203,
"step": 216
},
{
"epoch": 2.8227642276422764,
"grad_norm": 3.062261282857323,
"learning_rate": 1.8690889463055285e-05,
"loss": 0.1868,
"step": 217
},
{
"epoch": 2.8357723577235774,
"grad_norm": 3.825404215100477,
"learning_rate": 1.867679094433794e-05,
"loss": 0.2018,
"step": 218
},
{
"epoch": 2.848780487804878,
"grad_norm": 3.126980632581835,
"learning_rate": 1.8662622290745055e-05,
"loss": 0.166,
"step": 219
},
{
"epoch": 2.861788617886179,
"grad_norm": 3.114972845359332,
"learning_rate": 1.864838361680247e-05,
"loss": 0.1551,
"step": 220
},
{
"epoch": 2.87479674796748,
"grad_norm": 2.8749136447110506,
"learning_rate": 1.8634075037601995e-05,
"loss": 0.1491,
"step": 221
},
{
"epoch": 2.8878048780487804,
"grad_norm": 3.768681045766016,
"learning_rate": 1.8619696668800494e-05,
"loss": 0.2277,
"step": 222
},
{
"epoch": 2.9008130081300814,
"grad_norm": 3.340623859849148,
"learning_rate": 1.8605248626618942e-05,
"loss": 0.1699,
"step": 223
},
{
"epoch": 2.9138211382113823,
"grad_norm": 2.482743526058305,
"learning_rate": 1.8590731027841498e-05,
"loss": 0.1511,
"step": 224
},
{
"epoch": 2.926829268292683,
"grad_norm": 3.687140063825773,
"learning_rate": 1.8576143989814524e-05,
"loss": 0.2142,
"step": 225
},
{
"epoch": 2.939837398373984,
"grad_norm": 4.251426921981605,
"learning_rate": 1.8561487630445684e-05,
"loss": 0.2114,
"step": 226
},
{
"epoch": 2.952845528455285,
"grad_norm": 3.4267551820392605,
"learning_rate": 1.8546762068202967e-05,
"loss": 0.192,
"step": 227
},
{
"epoch": 2.9658536585365853,
"grad_norm": 3.723861341283011,
"learning_rate": 1.853196742211372e-05,
"loss": 0.1479,
"step": 228
},
{
"epoch": 2.9788617886178863,
"grad_norm": 2.924455024641601,
"learning_rate": 1.8517103811763713e-05,
"loss": 0.1724,
"step": 229
},
{
"epoch": 2.991869918699187,
"grad_norm": 3.470083478550075,
"learning_rate": 1.8502171357296144e-05,
"loss": 0.1654,
"step": 230
},
{
"epoch": 3.004878048780488,
"grad_norm": 3.3320283803300157,
"learning_rate": 1.8487170179410688e-05,
"loss": 0.1182,
"step": 231
},
{
"epoch": 3.017886178861789,
"grad_norm": 2.7110362310580354,
"learning_rate": 1.8472100399362518e-05,
"loss": 0.1352,
"step": 232
},
{
"epoch": 3.0308943089430893,
"grad_norm": 3.6623049720030996,
"learning_rate": 1.845696213896131e-05,
"loss": 0.1553,
"step": 233
},
{
"epoch": 3.0439024390243903,
"grad_norm": 2.6391263660030106,
"learning_rate": 1.844175552057028e-05,
"loss": 0.1514,
"step": 234
},
{
"epoch": 3.0569105691056913,
"grad_norm": 3.739938811029931,
"learning_rate": 1.8426480667105178e-05,
"loss": 0.1909,
"step": 235
},
{
"epoch": 3.069918699186992,
"grad_norm": 3.9129123235491274,
"learning_rate": 1.8411137702033306e-05,
"loss": 0.1714,
"step": 236
},
{
"epoch": 3.0829268292682928,
"grad_norm": 3.9313197740251957,
"learning_rate": 1.839572674937251e-05,
"loss": 0.1539,
"step": 237
},
{
"epoch": 3.0959349593495933,
"grad_norm": 3.1212290402854626,
"learning_rate": 1.8380247933690184e-05,
"loss": 0.1193,
"step": 238
},
{
"epoch": 3.1089430894308943,
"grad_norm": 5.198294944362961,
"learning_rate": 1.8364701380102267e-05,
"loss": 0.1458,
"step": 239
},
{
"epoch": 3.1219512195121952,
"grad_norm": 3.0080084795066377,
"learning_rate": 1.8349087214272222e-05,
"loss": 0.1541,
"step": 240
},
{
"epoch": 3.1349593495934958,
"grad_norm": 3.6470200841876492,
"learning_rate": 1.8333405562410026e-05,
"loss": 0.1652,
"step": 241
},
{
"epoch": 3.1479674796747967,
"grad_norm": 2.4499275104389686,
"learning_rate": 1.8317656551271138e-05,
"loss": 0.1136,
"step": 242
},
{
"epoch": 3.1609756097560977,
"grad_norm": 3.7602257479905115,
"learning_rate": 1.8301840308155507e-05,
"loss": 0.132,
"step": 243
},
{
"epoch": 3.1739837398373982,
"grad_norm": 3.086338349925423,
"learning_rate": 1.8285956960906502e-05,
"loss": 0.1273,
"step": 244
},
{
"epoch": 3.186991869918699,
"grad_norm": 3.2621971561813603,
"learning_rate": 1.8270006637909907e-05,
"loss": 0.1263,
"step": 245
},
{
"epoch": 3.2,
"grad_norm": 4.095594589401213,
"learning_rate": 1.8253989468092865e-05,
"loss": 0.2073,
"step": 246
},
{
"epoch": 3.2130081300813007,
"grad_norm": 3.3534907822906708,
"learning_rate": 1.823790558092286e-05,
"loss": 0.1585,
"step": 247
},
{
"epoch": 3.2260162601626017,
"grad_norm": 2.9358944102640008,
"learning_rate": 1.8221755106406636e-05,
"loss": 0.1246,
"step": 248
},
{
"epoch": 3.2390243902439027,
"grad_norm": 4.424506593945267,
"learning_rate": 1.8205538175089182e-05,
"loss": 0.2003,
"step": 249
},
{
"epoch": 3.252032520325203,
"grad_norm": 3.8505300778971554,
"learning_rate": 1.818925491805265e-05,
"loss": 0.149,
"step": 250
},
{
"epoch": 3.265040650406504,
"grad_norm": 3.5453083967016683,
"learning_rate": 1.8172905466915315e-05,
"loss": 0.1333,
"step": 251
},
{
"epoch": 3.278048780487805,
"grad_norm": 3.1785634434812566,
"learning_rate": 1.8156489953830488e-05,
"loss": 0.089,
"step": 252
},
{
"epoch": 3.2910569105691057,
"grad_norm": 2.903838553495901,
"learning_rate": 1.8140008511485474e-05,
"loss": 0.1301,
"step": 253
},
{
"epoch": 3.3040650406504066,
"grad_norm": 3.1675877537773505,
"learning_rate": 1.812346127310048e-05,
"loss": 0.1135,
"step": 254
},
{
"epoch": 3.317073170731707,
"grad_norm": 3.1742222793253685,
"learning_rate": 1.810684837242755e-05,
"loss": 0.13,
"step": 255
},
{
"epoch": 3.330081300813008,
"grad_norm": 3.34445630131317,
"learning_rate": 1.8090169943749477e-05,
"loss": 0.1189,
"step": 256
},
{
"epoch": 3.343089430894309,
"grad_norm": 3.3814743110574557,
"learning_rate": 1.8073426121878717e-05,
"loss": 0.1098,
"step": 257
},
{
"epoch": 3.3560975609756096,
"grad_norm": 3.496641524649336,
"learning_rate": 1.8056617042156307e-05,
"loss": 0.1278,
"step": 258
},
{
"epoch": 3.3691056910569106,
"grad_norm": 4.9263432893902115,
"learning_rate": 1.8039742840450764e-05,
"loss": 0.2073,
"step": 259
},
{
"epoch": 3.3821138211382116,
"grad_norm": 3.4975911499873047,
"learning_rate": 1.8022803653156983e-05,
"loss": 0.1183,
"step": 260
},
{
"epoch": 3.395121951219512,
"grad_norm": 3.2595016394744114,
"learning_rate": 1.8005799617195155e-05,
"loss": 0.108,
"step": 261
},
{
"epoch": 3.408130081300813,
"grad_norm": 3.7356695595162175,
"learning_rate": 1.798873087000963e-05,
"loss": 0.1486,
"step": 262
},
{
"epoch": 3.4211382113821136,
"grad_norm": 3.1467257874655354,
"learning_rate": 1.797159754956783e-05,
"loss": 0.1671,
"step": 263
},
{
"epoch": 3.4341463414634146,
"grad_norm": 3.0572228617731048,
"learning_rate": 1.7954399794359115e-05,
"loss": 0.1085,
"step": 264
},
{
"epoch": 3.4471544715447155,
"grad_norm": 3.5444513803003206,
"learning_rate": 1.7937137743393695e-05,
"loss": 0.1809,
"step": 265
},
{
"epoch": 3.460162601626016,
"grad_norm": 4.606794240145359,
"learning_rate": 1.791981153620147e-05,
"loss": 0.2356,
"step": 266
},
{
"epoch": 3.473170731707317,
"grad_norm": 3.608792070604836,
"learning_rate": 1.7902421312830915e-05,
"loss": 0.1449,
"step": 267
},
{
"epoch": 3.486178861788618,
"grad_norm": 3.671931359892289,
"learning_rate": 1.788496721384796e-05,
"loss": 0.1472,
"step": 268
},
{
"epoch": 3.4991869918699186,
"grad_norm": 2.8326328693087794,
"learning_rate": 1.7867449380334834e-05,
"loss": 0.1061,
"step": 269
},
{
"epoch": 3.5121951219512195,
"grad_norm": 5.132149323312545,
"learning_rate": 1.784986795388895e-05,
"loss": 0.1955,
"step": 270
},
{
"epoch": 3.5252032520325205,
"grad_norm": 3.5206185617465726,
"learning_rate": 1.7832223076621728e-05,
"loss": 0.1273,
"step": 271
},
{
"epoch": 3.538211382113821,
"grad_norm": 4.280123868142099,
"learning_rate": 1.7814514891157477e-05,
"loss": 0.1533,
"step": 272
},
{
"epoch": 3.551219512195122,
"grad_norm": 4.385914570096056,
"learning_rate": 1.7796743540632226e-05,
"loss": 0.1689,
"step": 273
},
{
"epoch": 3.564227642276423,
"grad_norm": 4.419998955550363,
"learning_rate": 1.7778909168692562e-05,
"loss": 0.2096,
"step": 274
},
{
"epoch": 3.5772357723577235,
"grad_norm": 3.12280732072736,
"learning_rate": 1.776101191949449e-05,
"loss": 0.1383,
"step": 275
},
{
"epoch": 3.5902439024390245,
"grad_norm": 2.866431440107905,
"learning_rate": 1.774305193770224e-05,
"loss": 0.1375,
"step": 276
},
{
"epoch": 3.6032520325203254,
"grad_norm": 4.007833812240409,
"learning_rate": 1.7725029368487125e-05,
"loss": 0.2024,
"step": 277
},
{
"epoch": 3.616260162601626,
"grad_norm": 3.53283079500347,
"learning_rate": 1.7706944357526344e-05,
"loss": 0.1478,
"step": 278
},
{
"epoch": 3.629268292682927,
"grad_norm": 3.2821005608945937,
"learning_rate": 1.768879705100183e-05,
"loss": 0.1506,
"step": 279
},
{
"epoch": 3.642276422764228,
"grad_norm": 3.6058872672852305,
"learning_rate": 1.7670587595599034e-05,
"loss": 0.1186,
"step": 280
},
{
"epoch": 3.6552845528455284,
"grad_norm": 3.9799824031049216,
"learning_rate": 1.7652316138505775e-05,
"loss": 0.162,
"step": 281
},
{
"epoch": 3.6682926829268294,
"grad_norm": 2.672345057078251,
"learning_rate": 1.763398282741103e-05,
"loss": 0.1257,
"step": 282
},
{
"epoch": 3.68130081300813,
"grad_norm": 3.547459688343558,
"learning_rate": 1.7615587810503742e-05,
"loss": 0.1442,
"step": 283
},
{
"epoch": 3.694308943089431,
"grad_norm": 2.7210687319693165,
"learning_rate": 1.759713123647163e-05,
"loss": 0.1217,
"step": 284
},
{
"epoch": 3.7073170731707314,
"grad_norm": 3.705663104009642,
"learning_rate": 1.757861325449997e-05,
"loss": 0.1432,
"step": 285
},
{
"epoch": 3.7203252032520324,
"grad_norm": 3.469597615351064,
"learning_rate": 1.7560034014270412e-05,
"loss": 0.1496,
"step": 286
},
{
"epoch": 3.7333333333333334,
"grad_norm": 3.8328893728698157,
"learning_rate": 1.754139366595976e-05,
"loss": 0.1689,
"step": 287
},
{
"epoch": 3.746341463414634,
"grad_norm": 4.975275429057309,
"learning_rate": 1.7522692360238754e-05,
"loss": 0.2421,
"step": 288
},
{
"epoch": 3.759349593495935,
"grad_norm": 2.974146879840772,
"learning_rate": 1.750393024827085e-05,
"loss": 0.1462,
"step": 289
},
{
"epoch": 3.772357723577236,
"grad_norm": 4.523596880463953,
"learning_rate": 1.7485107481711014e-05,
"loss": 0.1968,
"step": 290
},
{
"epoch": 3.7853658536585364,
"grad_norm": 4.071673728739772,
"learning_rate": 1.7466224212704476e-05,
"loss": 0.1348,
"step": 291
},
{
"epoch": 3.7983739837398374,
"grad_norm": 3.2858953786050655,
"learning_rate": 1.7447280593885513e-05,
"loss": 0.1562,
"step": 292
},
{
"epoch": 3.8113821138211383,
"grad_norm": 3.9233724024870074,
"learning_rate": 1.7428276778376216e-05,
"loss": 0.1403,
"step": 293
},
{
"epoch": 3.824390243902439,
"grad_norm": 2.747841015884429,
"learning_rate": 1.7409212919785246e-05,
"loss": 0.1402,
"step": 294
},
{
"epoch": 3.83739837398374,
"grad_norm": 3.8222812348697195,
"learning_rate": 1.7390089172206594e-05,
"loss": 0.1366,
"step": 295
},
{
"epoch": 3.850406504065041,
"grad_norm": 4.019423526397215,
"learning_rate": 1.7370905690218336e-05,
"loss": 0.1792,
"step": 296
},
{
"epoch": 3.8634146341463413,
"grad_norm": 3.2152225121342237,
"learning_rate": 1.7351662628881385e-05,
"loss": 0.1304,
"step": 297
},
{
"epoch": 3.8764227642276423,
"grad_norm": 3.2204034243819124,
"learning_rate": 1.7332360143738233e-05,
"loss": 0.1416,
"step": 298
},
{
"epoch": 3.8894308943089433,
"grad_norm": 3.503625110107874,
"learning_rate": 1.7312998390811704e-05,
"loss": 0.1127,
"step": 299
},
{
"epoch": 3.902439024390244,
"grad_norm": 3.6975773220511567,
"learning_rate": 1.7293577526603684e-05,
"loss": 0.1455,
"step": 300
},
{
"epoch": 3.915447154471545,
"grad_norm": 3.346976518634114,
"learning_rate": 1.727409770809385e-05,
"loss": 0.196,
"step": 301
},
{
"epoch": 3.9284552845528458,
"grad_norm": 3.8370342412634466,
"learning_rate": 1.7254559092738422e-05,
"loss": 0.1706,
"step": 302
},
{
"epoch": 3.9414634146341463,
"grad_norm": 4.103423675177489,
"learning_rate": 1.7234961838468865e-05,
"loss": 0.2372,
"step": 303
},
{
"epoch": 3.9544715447154473,
"grad_norm": 3.646214320041045,
"learning_rate": 1.7215306103690633e-05,
"loss": 0.1663,
"step": 304
},
{
"epoch": 3.9674796747967482,
"grad_norm": 4.0269576046459745,
"learning_rate": 1.719559204728188e-05,
"loss": 0.1895,
"step": 305
},
{
"epoch": 3.9804878048780488,
"grad_norm": 3.0723205937626283,
"learning_rate": 1.7175819828592177e-05,
"loss": 0.1684,
"step": 306
},
{
"epoch": 3.9934959349593497,
"grad_norm": 4.069974796680754,
"learning_rate": 1.715598960744121e-05,
"loss": 0.1536,
"step": 307
},
{
"epoch": 4.006504065040651,
"grad_norm": 2.5484710138290745,
"learning_rate": 1.7136101544117526e-05,
"loss": 0.1125,
"step": 308
},
{
"epoch": 4.019512195121951,
"grad_norm": 2.333032908465822,
"learning_rate": 1.7116155799377184e-05,
"loss": 0.1198,
"step": 309
},
{
"epoch": 4.032520325203252,
"grad_norm": 2.536574837694146,
"learning_rate": 1.7096152534442515e-05,
"loss": 0.0875,
"step": 310
},
{
"epoch": 4.045528455284553,
"grad_norm": 2.9417953379935415,
"learning_rate": 1.707609191100076e-05,
"loss": 0.1136,
"step": 311
},
{
"epoch": 4.058536585365854,
"grad_norm": 2.7829202859441122,
"learning_rate": 1.705597409120281e-05,
"loss": 0.1206,
"step": 312
},
{
"epoch": 4.071544715447154,
"grad_norm": 2.946996907122881,
"learning_rate": 1.7035799237661864e-05,
"loss": 0.094,
"step": 313
},
{
"epoch": 4.084552845528456,
"grad_norm": 2.933920883184802,
"learning_rate": 1.701556751345214e-05,
"loss": 0.1369,
"step": 314
},
{
"epoch": 4.097560975609756,
"grad_norm": 2.328489359994484,
"learning_rate": 1.6995279082107537e-05,
"loss": 0.089,
"step": 315
},
{
"epoch": 4.110569105691057,
"grad_norm": 4.249940359177669,
"learning_rate": 1.6974934107620322e-05,
"loss": 0.1335,
"step": 316
},
{
"epoch": 4.123577235772358,
"grad_norm": 3.3688477184048575,
"learning_rate": 1.6954532754439797e-05,
"loss": 0.148,
"step": 317
},
{
"epoch": 4.136585365853659,
"grad_norm": 2.3848280800192208,
"learning_rate": 1.693407518747098e-05,
"loss": 0.0736,
"step": 318
},
{
"epoch": 4.149593495934959,
"grad_norm": 3.4356309937170484,
"learning_rate": 1.6913561572073273e-05,
"loss": 0.0994,
"step": 319
},
{
"epoch": 4.16260162601626,
"grad_norm": 3.6302353229969446,
"learning_rate": 1.689299207405911e-05,
"loss": 0.1455,
"step": 320
},
{
"epoch": 4.175609756097561,
"grad_norm": 5.15932930488356,
"learning_rate": 1.687236685969263e-05,
"loss": 0.1421,
"step": 321
},
{
"epoch": 4.188617886178862,
"grad_norm": 3.971079854170419,
"learning_rate": 1.685168609568833e-05,
"loss": 0.1421,
"step": 322
},
{
"epoch": 4.201626016260162,
"grad_norm": 2.696136494041647,
"learning_rate": 1.6830949949209724e-05,
"loss": 0.0916,
"step": 323
},
{
"epoch": 4.214634146341464,
"grad_norm": 3.4922342292960358,
"learning_rate": 1.6810158587867973e-05,
"loss": 0.0906,
"step": 324
},
{
"epoch": 4.227642276422764,
"grad_norm": 3.3614438222315144,
"learning_rate": 1.678931217972055e-05,
"loss": 0.119,
"step": 325
},
{
"epoch": 4.240650406504065,
"grad_norm": 3.607315750706837,
"learning_rate": 1.6768410893269868e-05,
"loss": 0.1375,
"step": 326
},
{
"epoch": 4.253658536585366,
"grad_norm": 3.84233718866814,
"learning_rate": 1.674745489746193e-05,
"loss": 0.1383,
"step": 327
},
{
"epoch": 4.266666666666667,
"grad_norm": 3.1526315566529286,
"learning_rate": 1.6726444361684956e-05,
"loss": 0.1101,
"step": 328
},
{
"epoch": 4.279674796747967,
"grad_norm": 2.83908738016638,
"learning_rate": 1.6705379455768012e-05,
"loss": 0.1006,
"step": 329
},
{
"epoch": 4.2926829268292686,
"grad_norm": 2.679726062118238,
"learning_rate": 1.6684260349979637e-05,
"loss": 0.0969,
"step": 330
},
{
"epoch": 4.305691056910569,
"grad_norm": 3.839596809167504,
"learning_rate": 1.666308721502648e-05,
"loss": 0.1139,
"step": 331
},
{
"epoch": 4.31869918699187,
"grad_norm": 4.084712815755696,
"learning_rate": 1.66418602220519e-05,
"loss": 0.1159,
"step": 332
},
{
"epoch": 4.331707317073171,
"grad_norm": 3.4860619920681657,
"learning_rate": 1.66205795426346e-05,
"loss": 0.1091,
"step": 333
},
{
"epoch": 4.3447154471544716,
"grad_norm": 2.3329325167077526,
"learning_rate": 1.659924534878723e-05,
"loss": 0.0749,
"step": 334
},
{
"epoch": 4.357723577235772,
"grad_norm": 2.8903925345801937,
"learning_rate": 1.6577857812954994e-05,
"loss": 0.1314,
"step": 335
},
{
"epoch": 4.3707317073170735,
"grad_norm": 2.8706060227172747,
"learning_rate": 1.6556417108014274e-05,
"loss": 0.0888,
"step": 336
},
{
"epoch": 4.383739837398374,
"grad_norm": 2.8472139371320084,
"learning_rate": 1.6534923407271208e-05,
"loss": 0.1146,
"step": 337
},
{
"epoch": 4.396747967479675,
"grad_norm": 3.0525773691176967,
"learning_rate": 1.651337688446031e-05,
"loss": 0.1263,
"step": 338
},
{
"epoch": 4.409756097560976,
"grad_norm": 3.5942000990099077,
"learning_rate": 1.649177771374305e-05,
"loss": 0.1628,
"step": 339
},
{
"epoch": 4.4227642276422765,
"grad_norm": 3.667395925107678,
"learning_rate": 1.6470126069706456e-05,
"loss": 0.1747,
"step": 340
},
{
"epoch": 4.435772357723577,
"grad_norm": 3.3914683707328934,
"learning_rate": 1.6448422127361707e-05,
"loss": 0.1241,
"step": 341
},
{
"epoch": 4.4487804878048784,
"grad_norm": 4.017952969987458,
"learning_rate": 1.64266660621427e-05,
"loss": 0.1639,
"step": 342
},
{
"epoch": 4.461788617886179,
"grad_norm": 2.901364733314786,
"learning_rate": 1.640485804990465e-05,
"loss": 0.1061,
"step": 343
},
{
"epoch": 4.4747967479674795,
"grad_norm": 3.330073088240984,
"learning_rate": 1.6382998266922664e-05,
"loss": 0.1212,
"step": 344
},
{
"epoch": 4.487804878048781,
"grad_norm": 2.371208101416132,
"learning_rate": 1.6361086889890307e-05,
"loss": 0.0857,
"step": 345
},
{
"epoch": 4.5008130081300814,
"grad_norm": 2.166338014502868,
"learning_rate": 1.6339124095918187e-05,
"loss": 0.0705,
"step": 346
},
{
"epoch": 4.513821138211382,
"grad_norm": 2.8664401269053164,
"learning_rate": 1.631711006253251e-05,
"loss": 0.0906,
"step": 347
},
{
"epoch": 4.526829268292683,
"grad_norm": 3.2886681184392987,
"learning_rate": 1.6295044967673664e-05,
"loss": 0.1185,
"step": 348
},
{
"epoch": 4.539837398373984,
"grad_norm": 3.1209823393494323,
"learning_rate": 1.6272928989694764e-05,
"loss": 0.1127,
"step": 349
},
{
"epoch": 4.5528455284552845,
"grad_norm": 3.3212635148917165,
"learning_rate": 1.6250762307360206e-05,
"loss": 0.0961,
"step": 350
},
{
"epoch": 4.565853658536585,
"grad_norm": 2.94052757650137,
"learning_rate": 1.6228545099844244e-05,
"loss": 0.142,
"step": 351
},
{
"epoch": 4.578861788617886,
"grad_norm": 2.397685311630911,
"learning_rate": 1.6206277546729526e-05,
"loss": 0.0774,
"step": 352
},
{
"epoch": 4.591869918699187,
"grad_norm": 3.782885891266012,
"learning_rate": 1.6183959828005647e-05,
"loss": 0.1314,
"step": 353
},
{
"epoch": 4.6048780487804875,
"grad_norm": 2.8084619074891726,
"learning_rate": 1.6161592124067683e-05,
"loss": 0.0993,
"step": 354
},
{
"epoch": 4.617886178861789,
"grad_norm": 3.40847724620437,
"learning_rate": 1.6139174615714753e-05,
"loss": 0.1552,
"step": 355
},
{
"epoch": 4.630894308943089,
"grad_norm": 4.226646665724407,
"learning_rate": 1.611670748414855e-05,
"loss": 0.128,
"step": 356
},
{
"epoch": 4.64390243902439,
"grad_norm": 2.744441703743984,
"learning_rate": 1.6094190910971855e-05,
"loss": 0.1068,
"step": 357
},
{
"epoch": 4.656910569105691,
"grad_norm": 3.102497457734662,
"learning_rate": 1.6071625078187113e-05,
"loss": 0.0928,
"step": 358
},
{
"epoch": 4.669918699186992,
"grad_norm": 2.7529187753733764,
"learning_rate": 1.604901016819492e-05,
"loss": 0.1082,
"step": 359
},
{
"epoch": 4.682926829268292,
"grad_norm": 3.218033171112784,
"learning_rate": 1.6026346363792565e-05,
"loss": 0.1297,
"step": 360
},
{
"epoch": 4.695934959349594,
"grad_norm": 3.5829828322480655,
"learning_rate": 1.6003633848172563e-05,
"loss": 0.1044,
"step": 361
},
{
"epoch": 4.708943089430894,
"grad_norm": 2.5562220404894567,
"learning_rate": 1.598087280492115e-05,
"loss": 0.1094,
"step": 362
},
{
"epoch": 4.721951219512195,
"grad_norm": 2.65870226083651,
"learning_rate": 1.5958063418016832e-05,
"loss": 0.0905,
"step": 363
},
{
"epoch": 4.734959349593496,
"grad_norm": 3.03709602885955,
"learning_rate": 1.5935205871828854e-05,
"loss": 0.1163,
"step": 364
},
{
"epoch": 4.747967479674797,
"grad_norm": 2.7481439619992547,
"learning_rate": 1.591230035111576e-05,
"loss": 0.1232,
"step": 365
},
{
"epoch": 4.760975609756097,
"grad_norm": 4.6838555592653215,
"learning_rate": 1.588934704102385e-05,
"loss": 0.1096,
"step": 366
},
{
"epoch": 4.773983739837398,
"grad_norm": 3.7170873971103378,
"learning_rate": 1.5866346127085733e-05,
"loss": 0.1269,
"step": 367
},
{
"epoch": 4.786991869918699,
"grad_norm": 2.315923572226959,
"learning_rate": 1.5843297795218776e-05,
"loss": 0.0945,
"step": 368
},
{
"epoch": 4.8,
"grad_norm": 3.321463437238242,
"learning_rate": 1.582020223172365e-05,
"loss": 0.0908,
"step": 369
},
{
"epoch": 4.8130081300813,
"grad_norm": 2.657544174519707,
"learning_rate": 1.5797059623282787e-05,
"loss": 0.1172,
"step": 370
},
{
"epoch": 4.826016260162602,
"grad_norm": 3.368090622507422,
"learning_rate": 1.577387015695889e-05,
"loss": 0.137,
"step": 371
},
{
"epoch": 4.839024390243902,
"grad_norm": 2.901693855359267,
"learning_rate": 1.5750634020193412e-05,
"loss": 0.1161,
"step": 372
},
{
"epoch": 4.852032520325203,
"grad_norm": 2.6449094839962233,
"learning_rate": 1.5727351400805054e-05,
"loss": 0.1114,
"step": 373
},
{
"epoch": 4.865040650406504,
"grad_norm": 3.4919666907079527,
"learning_rate": 1.570402248698823e-05,
"loss": 0.1545,
"step": 374
},
{
"epoch": 4.878048780487805,
"grad_norm": 3.635239020195852,
"learning_rate": 1.568064746731156e-05,
"loss": 0.1498,
"step": 375
},
{
"epoch": 4.891056910569105,
"grad_norm": 3.4357399829486686,
"learning_rate": 1.5657226530716334e-05,
"loss": 0.1468,
"step": 376
},
{
"epoch": 4.904065040650407,
"grad_norm": 2.687904712655832,
"learning_rate": 1.563375986651499e-05,
"loss": 0.098,
"step": 377
},
{
"epoch": 4.917073170731707,
"grad_norm": 2.806033909498718,
"learning_rate": 1.5610247664389595e-05,
"loss": 0.1067,
"step": 378
},
{
"epoch": 4.930081300813008,
"grad_norm": 3.3832896563385018,
"learning_rate": 1.5586690114390285e-05,
"loss": 0.126,
"step": 379
},
{
"epoch": 4.943089430894309,
"grad_norm": 3.108700537124724,
"learning_rate": 1.5563087406933762e-05,
"loss": 0.1376,
"step": 380
},
{
"epoch": 4.95609756097561,
"grad_norm": 1.9568452215981411,
"learning_rate": 1.553943973280172e-05,
"loss": 0.0562,
"step": 381
},
{
"epoch": 4.96910569105691,
"grad_norm": 2.873350467301675,
"learning_rate": 1.5515747283139333e-05,
"loss": 0.114,
"step": 382
},
{
"epoch": 4.982113821138212,
"grad_norm": 3.0898926838453304,
"learning_rate": 1.5492010249453694e-05,
"loss": 0.1421,
"step": 383
},
{
"epoch": 4.995121951219512,
"grad_norm": 3.408492358819634,
"learning_rate": 1.5468228823612268e-05,
"loss": 0.1508,
"step": 384
},
{
"epoch": 5.008130081300813,
"grad_norm": 1.9243903184184463,
"learning_rate": 1.5444403197841345e-05,
"loss": 0.0526,
"step": 385
},
{
"epoch": 5.021138211382114,
"grad_norm": 3.0308761794837524,
"learning_rate": 1.5420533564724495e-05,
"loss": 0.0845,
"step": 386
},
{
"epoch": 5.034146341463415,
"grad_norm": 2.0471741531886707,
"learning_rate": 1.5396620117200983e-05,
"loss": 0.0528,
"step": 387
},
{
"epoch": 5.047154471544715,
"grad_norm": 2.2719302954774196,
"learning_rate": 1.537266304856424e-05,
"loss": 0.0731,
"step": 388
},
{
"epoch": 5.060162601626017,
"grad_norm": 2.059060042307859,
"learning_rate": 1.5348662552460286e-05,
"loss": 0.0639,
"step": 389
},
{
"epoch": 5.073170731707317,
"grad_norm": 1.5169263928736323,
"learning_rate": 1.5324618822886167e-05,
"loss": 0.0372,
"step": 390
},
{
"epoch": 5.086178861788618,
"grad_norm": 2.122396210385444,
"learning_rate": 1.5300532054188382e-05,
"loss": 0.0684,
"step": 391
},
{
"epoch": 5.099186991869919,
"grad_norm": 1.9924949570413217,
"learning_rate": 1.527640244106133e-05,
"loss": 0.0518,
"step": 392
},
{
"epoch": 5.11219512195122,
"grad_norm": 2.247674900681653,
"learning_rate": 1.5252230178545704e-05,
"loss": 0.0694,
"step": 393
},
{
"epoch": 5.12520325203252,
"grad_norm": 3.640325939803553,
"learning_rate": 1.5228015462026955e-05,
"loss": 0.0698,
"step": 394
},
{
"epoch": 5.138211382113822,
"grad_norm": 2.512435243390525,
"learning_rate": 1.5203758487233677e-05,
"loss": 0.0692,
"step": 395
},
{
"epoch": 5.151219512195122,
"grad_norm": 3.714473909582526,
"learning_rate": 1.517945945023604e-05,
"loss": 0.075,
"step": 396
},
{
"epoch": 5.164227642276423,
"grad_norm": 2.307335242680726,
"learning_rate": 1.5155118547444215e-05,
"loss": 0.0645,
"step": 397
},
{
"epoch": 5.177235772357724,
"grad_norm": 2.8298726702585326,
"learning_rate": 1.5130735975606765e-05,
"loss": 0.0848,
"step": 398
},
{
"epoch": 5.190243902439025,
"grad_norm": 1.973524301024672,
"learning_rate": 1.510631193180907e-05,
"loss": 0.0462,
"step": 399
},
{
"epoch": 5.203252032520325,
"grad_norm": 3.610899615291046,
"learning_rate": 1.5081846613471736e-05,
"loss": 0.0792,
"step": 400
},
{
"epoch": 5.216260162601626,
"grad_norm": 3.897198559697946,
"learning_rate": 1.505734021834898e-05,
"loss": 0.054,
"step": 401
},
{
"epoch": 5.229268292682927,
"grad_norm": 3.3106506865159764,
"learning_rate": 1.503279294452705e-05,
"loss": 0.0982,
"step": 402
},
{
"epoch": 5.242276422764228,
"grad_norm": 2.9568842405195723,
"learning_rate": 1.5008204990422624e-05,
"loss": 0.0582,
"step": 403
},
{
"epoch": 5.255284552845528,
"grad_norm": 3.3317136023549416,
"learning_rate": 1.4983576554781193e-05,
"loss": 0.0899,
"step": 404
},
{
"epoch": 5.2682926829268295,
"grad_norm": 4.055210728386033,
"learning_rate": 1.4958907836675467e-05,
"loss": 0.0809,
"step": 405
},
{
"epoch": 5.28130081300813,
"grad_norm": 3.089501362613283,
"learning_rate": 1.4934199035503758e-05,
"loss": 0.0807,
"step": 406
},
{
"epoch": 5.294308943089431,
"grad_norm": 4.266765095296973,
"learning_rate": 1.4909450350988368e-05,
"loss": 0.1049,
"step": 407
},
{
"epoch": 5.307317073170732,
"grad_norm": 4.9112430954409065,
"learning_rate": 1.488466198317399e-05,
"loss": 0.1119,
"step": 408
},
{
"epoch": 5.3203252032520325,
"grad_norm": 2.851122180017563,
"learning_rate": 1.485983413242606e-05,
"loss": 0.064,
"step": 409
},
{
"epoch": 5.333333333333333,
"grad_norm": 3.3103976852400057,
"learning_rate": 1.4834966999429179e-05,
"loss": 0.0758,
"step": 410
},
{
"epoch": 5.3463414634146345,
"grad_norm": 2.8833619664931063,
"learning_rate": 1.4810060785185445e-05,
"loss": 0.054,
"step": 411
},
{
"epoch": 5.359349593495935,
"grad_norm": 2.7217775344544184,
"learning_rate": 1.4785115691012866e-05,
"loss": 0.0413,
"step": 412
},
{
"epoch": 5.3723577235772355,
"grad_norm": 2.538196105810992,
"learning_rate": 1.4760131918543717e-05,
"loss": 0.042,
"step": 413
},
{
"epoch": 5.385365853658537,
"grad_norm": 2.9038501446090486,
"learning_rate": 1.4735109669722905e-05,
"loss": 0.0706,
"step": 414
},
{
"epoch": 5.3983739837398375,
"grad_norm": 3.9624551793644835,
"learning_rate": 1.4710049146806348e-05,
"loss": 0.0942,
"step": 415
},
{
"epoch": 5.411382113821138,
"grad_norm": 3.1893235527156354,
"learning_rate": 1.4684950552359335e-05,
"loss": 0.1011,
"step": 416
},
{
"epoch": 5.424390243902439,
"grad_norm": 4.161856110377288,
"learning_rate": 1.4659814089254889e-05,
"loss": 0.1441,
"step": 417
},
{
"epoch": 5.43739837398374,
"grad_norm": 2.045583629237472,
"learning_rate": 1.463463996067212e-05,
"loss": 0.0367,
"step": 418
},
{
"epoch": 5.4504065040650405,
"grad_norm": 2.933530598957382,
"learning_rate": 1.46094283700946e-05,
"loss": 0.0717,
"step": 419
},
{
"epoch": 5.463414634146342,
"grad_norm": 2.846028092528159,
"learning_rate": 1.4584179521308703e-05,
"loss": 0.0719,
"step": 420
},
{
"epoch": 5.476422764227642,
"grad_norm": 3.6129655373506555,
"learning_rate": 1.4558893618401961e-05,
"loss": 0.0996,
"step": 421
},
{
"epoch": 5.489430894308943,
"grad_norm": 4.166578019941605,
"learning_rate": 1.4533570865761422e-05,
"loss": 0.0876,
"step": 422
},
{
"epoch": 5.5024390243902435,
"grad_norm": 3.619734283505654,
"learning_rate": 1.4508211468071985e-05,
"loss": 0.1177,
"step": 423
},
{
"epoch": 5.515447154471545,
"grad_norm": 3.1796554053433117,
"learning_rate": 1.4482815630314752e-05,
"loss": 0.0877,
"step": 424
},
{
"epoch": 5.528455284552845,
"grad_norm": 2.703328251194239,
"learning_rate": 1.4457383557765385e-05,
"loss": 0.0725,
"step": 425
},
{
"epoch": 5.541463414634146,
"grad_norm": 2.707896933553079,
"learning_rate": 1.4431915455992416e-05,
"loss": 0.0744,
"step": 426
},
{
"epoch": 5.554471544715447,
"grad_norm": 3.091276570995233,
"learning_rate": 1.440641153085561e-05,
"loss": 0.0742,
"step": 427
},
{
"epoch": 5.567479674796748,
"grad_norm": 2.4029960605310303,
"learning_rate": 1.4380871988504299e-05,
"loss": 0.0511,
"step": 428
},
{
"epoch": 5.580487804878048,
"grad_norm": 2.6919229748272095,
"learning_rate": 1.4355297035375704e-05,
"loss": 0.083,
"step": 429
},
{
"epoch": 5.59349593495935,
"grad_norm": 2.967320389146333,
"learning_rate": 1.4329686878193271e-05,
"loss": 0.0698,
"step": 430
},
{
"epoch": 5.60650406504065,
"grad_norm": 3.0319786590172555,
"learning_rate": 1.4304041723965009e-05,
"loss": 0.1039,
"step": 431
},
{
"epoch": 5.619512195121951,
"grad_norm": 2.6638640692433326,
"learning_rate": 1.4278361779981806e-05,
"loss": 0.0846,
"step": 432
},
{
"epoch": 5.632520325203252,
"grad_norm": 4.096202236885203,
"learning_rate": 1.4252647253815757e-05,
"loss": 0.0688,
"step": 433
},
{
"epoch": 5.645528455284553,
"grad_norm": 2.3758807014770933,
"learning_rate": 1.4226898353318483e-05,
"loss": 0.0747,
"step": 434
},
{
"epoch": 5.658536585365853,
"grad_norm": 4.045292672850446,
"learning_rate": 1.4201115286619464e-05,
"loss": 0.1139,
"step": 435
},
{
"epoch": 5.671544715447155,
"grad_norm": 2.4680862232566003,
"learning_rate": 1.4175298262124333e-05,
"loss": 0.08,
"step": 436
},
{
"epoch": 5.684552845528455,
"grad_norm": 3.370892166755356,
"learning_rate": 1.4149447488513217e-05,
"loss": 0.1043,
"step": 437
},
{
"epoch": 5.697560975609756,
"grad_norm": 3.6989609013172413,
"learning_rate": 1.4123563174739036e-05,
"loss": 0.0963,
"step": 438
},
{
"epoch": 5.710569105691057,
"grad_norm": 2.7026993176086935,
"learning_rate": 1.4097645530025812e-05,
"loss": 0.1011,
"step": 439
},
{
"epoch": 5.723577235772358,
"grad_norm": 2.671495036637557,
"learning_rate": 1.4071694763866988e-05,
"loss": 0.0732,
"step": 440
},
{
"epoch": 5.736585365853658,
"grad_norm": 2.7556928866773465,
"learning_rate": 1.4045711086023721e-05,
"loss": 0.0926,
"step": 441
},
{
"epoch": 5.74959349593496,
"grad_norm": 3.069018095017195,
"learning_rate": 1.4019694706523203e-05,
"loss": 0.1156,
"step": 442
},
{
"epoch": 5.76260162601626,
"grad_norm": 2.621864072211424,
"learning_rate": 1.3993645835656955e-05,
"loss": 0.0828,
"step": 443
},
{
"epoch": 5.775609756097561,
"grad_norm": 3.428109479643195,
"learning_rate": 1.3967564683979125e-05,
"loss": 0.1033,
"step": 444
},
{
"epoch": 5.788617886178862,
"grad_norm": 2.474144283173875,
"learning_rate": 1.3941451462304778e-05,
"loss": 0.0791,
"step": 445
},
{
"epoch": 5.801626016260163,
"grad_norm": 2.7246381307154866,
"learning_rate": 1.391530638170822e-05,
"loss": 0.0927,
"step": 446
},
{
"epoch": 5.814634146341463,
"grad_norm": 2.1261222791476464,
"learning_rate": 1.3889129653521262e-05,
"loss": 0.0713,
"step": 447
},
{
"epoch": 5.827642276422765,
"grad_norm": 3.0617042801154035,
"learning_rate": 1.3862921489331526e-05,
"loss": 0.1068,
"step": 448
},
{
"epoch": 5.840650406504065,
"grad_norm": 2.8852311127283152,
"learning_rate": 1.3836682100980739e-05,
"loss": 0.0872,
"step": 449
},
{
"epoch": 5.853658536585366,
"grad_norm": 2.6420556921053784,
"learning_rate": 1.3810411700563005e-05,
"loss": 0.0934,
"step": 450
},
{
"epoch": 5.866666666666667,
"grad_norm": 2.1997468772995794,
"learning_rate": 1.3784110500423104e-05,
"loss": 0.0647,
"step": 451
},
{
"epoch": 5.879674796747968,
"grad_norm": 2.569366229126445,
"learning_rate": 1.3757778713154772e-05,
"loss": 0.088,
"step": 452
},
{
"epoch": 5.892682926829268,
"grad_norm": 2.4657362166669,
"learning_rate": 1.3731416551598984e-05,
"loss": 0.0738,
"step": 453
},
{
"epoch": 5.905691056910569,
"grad_norm": 2.3032390982249744,
"learning_rate": 1.3705024228842223e-05,
"loss": 0.0679,
"step": 454
},
{
"epoch": 5.91869918699187,
"grad_norm": 2.4116624882592665,
"learning_rate": 1.3678601958214779e-05,
"loss": 0.0748,
"step": 455
},
{
"epoch": 5.931707317073171,
"grad_norm": 2.9504969618150367,
"learning_rate": 1.3652149953289002e-05,
"loss": 0.0723,
"step": 456
},
{
"epoch": 5.944715447154471,
"grad_norm": 2.9379356263518415,
"learning_rate": 1.362566842787759e-05,
"loss": 0.0693,
"step": 457
},
{
"epoch": 5.957723577235773,
"grad_norm": 2.8833146911131706,
"learning_rate": 1.3599157596031853e-05,
"loss": 0.0791,
"step": 458
},
{
"epoch": 5.970731707317073,
"grad_norm": 2.8272697602148287,
"learning_rate": 1.3572617672039994e-05,
"loss": 0.0706,
"step": 459
},
{
"epoch": 5.983739837398374,
"grad_norm": 2.6799390700321526,
"learning_rate": 1.3546048870425356e-05,
"loss": 0.0728,
"step": 460
},
{
"epoch": 5.996747967479675,
"grad_norm": 5.719197939165723,
"learning_rate": 1.3519451405944717e-05,
"loss": 0.1236,
"step": 461
},
{
"epoch": 6.009756097560976,
"grad_norm": 2.111845499659754,
"learning_rate": 1.3492825493586526e-05,
"loss": 0.0448,
"step": 462
},
{
"epoch": 6.022764227642276,
"grad_norm": 2.1368142778246915,
"learning_rate": 1.346617134856918e-05,
"loss": 0.0385,
"step": 463
},
{
"epoch": 6.035772357723578,
"grad_norm": 2.1481116046164477,
"learning_rate": 1.3439489186339283e-05,
"loss": 0.0342,
"step": 464
},
{
"epoch": 6.048780487804878,
"grad_norm": 1.7270640749140826,
"learning_rate": 1.3412779222569907e-05,
"loss": 0.0301,
"step": 465
},
{
"epoch": 6.061788617886179,
"grad_norm": 2.0239851478698427,
"learning_rate": 1.3386041673158836e-05,
"loss": 0.0501,
"step": 466
},
{
"epoch": 6.07479674796748,
"grad_norm": 1.9442885149236053,
"learning_rate": 1.3359276754226839e-05,
"loss": 0.0402,
"step": 467
},
{
"epoch": 6.087804878048781,
"grad_norm": 2.3395885319875824,
"learning_rate": 1.3332484682115916e-05,
"loss": 0.0588,
"step": 468
},
{
"epoch": 6.100813008130081,
"grad_norm": 1.9512362409390018,
"learning_rate": 1.3305665673387528e-05,
"loss": 0.0338,
"step": 469
},
{
"epoch": 6.1138211382113825,
"grad_norm": 2.690266981810868,
"learning_rate": 1.3278819944820893e-05,
"loss": 0.0337,
"step": 470
},
{
"epoch": 6.126829268292683,
"grad_norm": 3.4250412108092454,
"learning_rate": 1.3251947713411187e-05,
"loss": 0.0486,
"step": 471
},
{
"epoch": 6.139837398373984,
"grad_norm": 2.6736270241738485,
"learning_rate": 1.3225049196367814e-05,
"loss": 0.058,
"step": 472
},
{
"epoch": 6.152845528455284,
"grad_norm": 2.520984117218317,
"learning_rate": 1.319812461111265e-05,
"loss": 0.0378,
"step": 473
},
{
"epoch": 6.1658536585365855,
"grad_norm": 1.9537033631580494,
"learning_rate": 1.317117417527828e-05,
"loss": 0.0363,
"step": 474
},
{
"epoch": 6.178861788617886,
"grad_norm": 2.310023434054684,
"learning_rate": 1.314419810670624e-05,
"loss": 0.0345,
"step": 475
},
{
"epoch": 6.191869918699187,
"grad_norm": 2.292477954767942,
"learning_rate": 1.3117196623445253e-05,
"loss": 0.0354,
"step": 476
},
{
"epoch": 6.204878048780488,
"grad_norm": 3.093112196382202,
"learning_rate": 1.3090169943749475e-05,
"loss": 0.0514,
"step": 477
},
{
"epoch": 6.2178861788617885,
"grad_norm": 1.7238228081457507,
"learning_rate": 1.3063118286076726e-05,
"loss": 0.0261,
"step": 478
},
{
"epoch": 6.230894308943089,
"grad_norm": 2.4405985722534913,
"learning_rate": 1.3036041869086718e-05,
"loss": 0.0308,
"step": 479
},
{
"epoch": 6.2439024390243905,
"grad_norm": 2.855561384021881,
"learning_rate": 1.3008940911639302e-05,
"loss": 0.0472,
"step": 480
},
{
"epoch": 6.256910569105691,
"grad_norm": 2.1670982199844264,
"learning_rate": 1.2981815632792683e-05,
"loss": 0.0415,
"step": 481
},
{
"epoch": 6.2699186991869915,
"grad_norm": 3.555690563412571,
"learning_rate": 1.2954666251801662e-05,
"loss": 0.0448,
"step": 482
},
{
"epoch": 6.282926829268293,
"grad_norm": 2.8659727016916157,
"learning_rate": 1.2927492988115857e-05,
"loss": 0.0554,
"step": 483
},
{
"epoch": 6.2959349593495935,
"grad_norm": 3.278250470071062,
"learning_rate": 1.2900296061377927e-05,
"loss": 0.0366,
"step": 484
},
{
"epoch": 6.308943089430894,
"grad_norm": 3.9910474066295913,
"learning_rate": 1.2873075691421808e-05,
"loss": 0.0343,
"step": 485
},
{
"epoch": 6.321951219512195,
"grad_norm": 3.8233856715254353,
"learning_rate": 1.2845832098270925e-05,
"loss": 0.0771,
"step": 486
},
{
"epoch": 6.334959349593496,
"grad_norm": 3.0294426925564943,
"learning_rate": 1.2818565502136414e-05,
"loss": 0.0822,
"step": 487
},
{
"epoch": 6.3479674796747965,
"grad_norm": 4.003605690459721,
"learning_rate": 1.2791276123415348e-05,
"loss": 0.0691,
"step": 488
},
{
"epoch": 6.360975609756098,
"grad_norm": 3.1341159493935438,
"learning_rate": 1.2763964182688955e-05,
"loss": 0.0689,
"step": 489
},
{
"epoch": 6.373983739837398,
"grad_norm": 4.086900629835164,
"learning_rate": 1.2736629900720832e-05,
"loss": 0.0673,
"step": 490
},
{
"epoch": 6.386991869918699,
"grad_norm": 1.9378624994658462,
"learning_rate": 1.2709273498455152e-05,
"loss": 0.0225,
"step": 491
},
{
"epoch": 6.4,
"grad_norm": 2.1324669324753374,
"learning_rate": 1.26818951970149e-05,
"loss": 0.042,
"step": 492
},
{
"epoch": 6.413008130081301,
"grad_norm": 1.5886821998323448,
"learning_rate": 1.265449521770007e-05,
"loss": 0.0269,
"step": 493
},
{
"epoch": 6.426016260162601,
"grad_norm": 2.9030595838580413,
"learning_rate": 1.262707378198587e-05,
"loss": 0.0484,
"step": 494
},
{
"epoch": 6.439024390243903,
"grad_norm": 3.5977417503958535,
"learning_rate": 1.2599631111520956e-05,
"loss": 0.0508,
"step": 495
},
{
"epoch": 6.452032520325203,
"grad_norm": 3.6694174147590513,
"learning_rate": 1.2572167428125608e-05,
"loss": 0.0674,
"step": 496
},
{
"epoch": 6.465040650406504,
"grad_norm": 2.4129473140571136,
"learning_rate": 1.254468295378997e-05,
"loss": 0.0462,
"step": 497
},
{
"epoch": 6.478048780487805,
"grad_norm": 3.1858274317038577,
"learning_rate": 1.2517177910672237e-05,
"loss": 0.0552,
"step": 498
},
{
"epoch": 6.491056910569106,
"grad_norm": 2.5935148127658345,
"learning_rate": 1.2489652521096852e-05,
"loss": 0.0481,
"step": 499
},
{
"epoch": 6.504065040650406,
"grad_norm": 2.9450402573814567,
"learning_rate": 1.2462107007552726e-05,
"loss": 0.0582,
"step": 500
},
{
"epoch": 6.517073170731708,
"grad_norm": 2.7069332832569417,
"learning_rate": 1.2434541592691443e-05,
"loss": 0.0592,
"step": 501
},
{
"epoch": 6.530081300813008,
"grad_norm": 2.567349465362145,
"learning_rate": 1.2406956499325429e-05,
"loss": 0.0553,
"step": 502
},
{
"epoch": 6.543089430894309,
"grad_norm": 2.0895897218164396,
"learning_rate": 1.2379351950426188e-05,
"loss": 0.0334,
"step": 503
},
{
"epoch": 6.55609756097561,
"grad_norm": 1.8984990697792024,
"learning_rate": 1.2351728169122483e-05,
"loss": 0.0331,
"step": 504
},
{
"epoch": 6.569105691056911,
"grad_norm": 1.7581002434576,
"learning_rate": 1.2324085378698529e-05,
"loss": 0.0311,
"step": 505
},
{
"epoch": 6.582113821138211,
"grad_norm": 2.808270807082475,
"learning_rate": 1.229642380259219e-05,
"loss": 0.0542,
"step": 506
},
{
"epoch": 6.595121951219512,
"grad_norm": 2.5940978671200328,
"learning_rate": 1.2268743664393182e-05,
"loss": 0.0615,
"step": 507
},
{
"epoch": 6.608130081300813,
"grad_norm": 3.037820820822087,
"learning_rate": 1.2241045187841257e-05,
"loss": 0.0476,
"step": 508
},
{
"epoch": 6.621138211382114,
"grad_norm": 3.174677749036126,
"learning_rate": 1.2213328596824392e-05,
"loss": 0.0841,
"step": 509
},
{
"epoch": 6.634146341463414,
"grad_norm": 3.0257255090348982,
"learning_rate": 1.2185594115376991e-05,
"loss": 0.1044,
"step": 510
},
{
"epoch": 6.647154471544716,
"grad_norm": 3.4624261488209407,
"learning_rate": 1.2157841967678064e-05,
"loss": 0.0769,
"step": 511
},
{
"epoch": 6.660162601626016,
"grad_norm": 1.9612032892773348,
"learning_rate": 1.2130072378049416e-05,
"loss": 0.0304,
"step": 512
},
{
"epoch": 6.673170731707317,
"grad_norm": 2.5213552327747473,
"learning_rate": 1.2102285570953842e-05,
"loss": 0.0335,
"step": 513
},
{
"epoch": 6.686178861788618,
"grad_norm": 1.8981183407392777,
"learning_rate": 1.2074481770993298e-05,
"loss": 0.0342,
"step": 514
},
{
"epoch": 6.699186991869919,
"grad_norm": 4.290206859282562,
"learning_rate": 1.2046661202907101e-05,
"loss": 0.0945,
"step": 515
},
{
"epoch": 6.712195121951219,
"grad_norm": 1.8323648554205363,
"learning_rate": 1.2018824091570103e-05,
"loss": 0.0285,
"step": 516
},
{
"epoch": 6.725203252032521,
"grad_norm": 2.763087455511772,
"learning_rate": 1.1990970661990877e-05,
"loss": 0.0683,
"step": 517
},
{
"epoch": 6.738211382113821,
"grad_norm": 2.353806018174121,
"learning_rate": 1.1963101139309894e-05,
"loss": 0.0377,
"step": 518
},
{
"epoch": 6.751219512195122,
"grad_norm": 2.454104825810179,
"learning_rate": 1.1935215748797708e-05,
"loss": 0.0676,
"step": 519
},
{
"epoch": 6.764227642276423,
"grad_norm": 1.6279843510429204,
"learning_rate": 1.1907314715853138e-05,
"loss": 0.0348,
"step": 520
},
{
"epoch": 6.777235772357724,
"grad_norm": 2.6490086647572326,
"learning_rate": 1.187939826600143e-05,
"loss": 0.0455,
"step": 521
},
{
"epoch": 6.790243902439024,
"grad_norm": 2.0481762160031853,
"learning_rate": 1.1851466624892455e-05,
"loss": 0.033,
"step": 522
},
{
"epoch": 6.803252032520325,
"grad_norm": 2.094011332691674,
"learning_rate": 1.1823520018298877e-05,
"loss": 0.0496,
"step": 523
},
{
"epoch": 6.816260162601626,
"grad_norm": 3.246270225708711,
"learning_rate": 1.1795558672114321e-05,
"loss": 0.0836,
"step": 524
},
{
"epoch": 6.829268292682927,
"grad_norm": 3.006525188811639,
"learning_rate": 1.176758281235155e-05,
"loss": 0.0409,
"step": 525
},
{
"epoch": 6.842276422764227,
"grad_norm": 2.4038041178955933,
"learning_rate": 1.1739592665140652e-05,
"loss": 0.0479,
"step": 526
},
{
"epoch": 6.855284552845529,
"grad_norm": 2.4243653320767162,
"learning_rate": 1.1711588456727187e-05,
"loss": 0.0395,
"step": 527
},
{
"epoch": 6.868292682926829,
"grad_norm": 2.897618990620674,
"learning_rate": 1.1683570413470384e-05,
"loss": 0.0891,
"step": 528
},
{
"epoch": 6.88130081300813,
"grad_norm": 2.7286570543114737,
"learning_rate": 1.16555387618413e-05,
"loss": 0.0713,
"step": 529
},
{
"epoch": 6.894308943089431,
"grad_norm": 2.610695316382916,
"learning_rate": 1.1627493728420978e-05,
"loss": 0.0541,
"step": 530
},
{
"epoch": 6.907317073170732,
"grad_norm": 2.814844562765686,
"learning_rate": 1.1599435539898636e-05,
"loss": 0.0414,
"step": 531
},
{
"epoch": 6.920325203252032,
"grad_norm": 1.7831279324354765,
"learning_rate": 1.1571364423069822e-05,
"loss": 0.0286,
"step": 532
},
{
"epoch": 6.933333333333334,
"grad_norm": 1.8353679190961905,
"learning_rate": 1.1543280604834581e-05,
"loss": 0.0291,
"step": 533
},
{
"epoch": 6.946341463414634,
"grad_norm": 1.8813576863746508,
"learning_rate": 1.151518431219563e-05,
"loss": 0.0243,
"step": 534
},
{
"epoch": 6.959349593495935,
"grad_norm": 2.485555139073027,
"learning_rate": 1.1487075772256517e-05,
"loss": 0.0465,
"step": 535
},
{
"epoch": 6.972357723577236,
"grad_norm": 2.0103036241801204,
"learning_rate": 1.145895521221978e-05,
"loss": 0.0337,
"step": 536
},
{
"epoch": 6.985365853658537,
"grad_norm": 1.7855330363134008,
"learning_rate": 1.143082285938512e-05,
"loss": 0.0307,
"step": 537
},
{
"epoch": 6.998373983739837,
"grad_norm": 3.0176841119969025,
"learning_rate": 1.1402678941147557e-05,
"loss": 0.0596,
"step": 538
},
{
"epoch": 7.0113821138211385,
"grad_norm": 1.4748242874579913,
"learning_rate": 1.13745236849956e-05,
"loss": 0.0172,
"step": 539
},
{
"epoch": 7.024390243902439,
"grad_norm": 0.9195000087565399,
"learning_rate": 1.1346357318509395e-05,
"loss": 0.0176,
"step": 540
},
{
"epoch": 7.03739837398374,
"grad_norm": 1.9857762694373016,
"learning_rate": 1.1318180069358901e-05,
"loss": 0.025,
"step": 541
},
{
"epoch": 7.050406504065041,
"grad_norm": 1.0243241165825754,
"learning_rate": 1.1289992165302036e-05,
"loss": 0.0188,
"step": 542
},
{
"epoch": 7.0634146341463415,
"grad_norm": 1.0609924781993678,
"learning_rate": 1.1261793834182843e-05,
"loss": 0.014,
"step": 543
},
{
"epoch": 7.076422764227642,
"grad_norm": 1.737319207779907,
"learning_rate": 1.1233585303929654e-05,
"loss": 0.0232,
"step": 544
},
{
"epoch": 7.0894308943089435,
"grad_norm": 1.7478320052621348,
"learning_rate": 1.1205366802553231e-05,
"loss": 0.0236,
"step": 545
},
{
"epoch": 7.102439024390244,
"grad_norm": 1.7972556961794965,
"learning_rate": 1.1177138558144941e-05,
"loss": 0.0276,
"step": 546
},
{
"epoch": 7.1154471544715445,
"grad_norm": 2.071753371598691,
"learning_rate": 1.1148900798874904e-05,
"loss": 0.0212,
"step": 547
},
{
"epoch": 7.128455284552846,
"grad_norm": 1.454333842329555,
"learning_rate": 1.112065375299014e-05,
"loss": 0.0209,
"step": 548
},
{
"epoch": 7.1414634146341465,
"grad_norm": 2.2128806729074304,
"learning_rate": 1.1092397648812746e-05,
"loss": 0.0285,
"step": 549
},
{
"epoch": 7.154471544715447,
"grad_norm": 0.6146676806963332,
"learning_rate": 1.1064132714738024e-05,
"loss": 0.0083,
"step": 550
},
{
"epoch": 7.167479674796748,
"grad_norm": 1.474108395054986,
"learning_rate": 1.1035859179232661e-05,
"loss": 0.0162,
"step": 551
},
{
"epoch": 7.180487804878049,
"grad_norm": 1.7333842083551732,
"learning_rate": 1.1007577270832864e-05,
"loss": 0.0259,
"step": 552
},
{
"epoch": 7.1934959349593495,
"grad_norm": 2.6323575548170677,
"learning_rate": 1.0979287218142518e-05,
"loss": 0.0395,
"step": 553
},
{
"epoch": 7.20650406504065,
"grad_norm": 1.0719131678482425,
"learning_rate": 1.0950989249831337e-05,
"loss": 0.0124,
"step": 554
},
{
"epoch": 7.219512195121951,
"grad_norm": 1.5833243700550403,
"learning_rate": 1.092268359463302e-05,
"loss": 0.0099,
"step": 555
},
{
"epoch": 7.232520325203252,
"grad_norm": 2.2905756467347302,
"learning_rate": 1.0894370481343406e-05,
"loss": 0.0328,
"step": 556
},
{
"epoch": 7.2455284552845525,
"grad_norm": 1.3978528077488823,
"learning_rate": 1.0866050138818602e-05,
"loss": 0.0124,
"step": 557
},
{
"epoch": 7.258536585365854,
"grad_norm": 2.3538286947826994,
"learning_rate": 1.0837722795973162e-05,
"loss": 0.0223,
"step": 558
},
{
"epoch": 7.271544715447154,
"grad_norm": 1.8186330421511054,
"learning_rate": 1.0809388681778223e-05,
"loss": 0.0216,
"step": 559
},
{
"epoch": 7.284552845528455,
"grad_norm": 0.945401196778342,
"learning_rate": 1.0781048025259648e-05,
"loss": 0.0134,
"step": 560
},
{
"epoch": 7.297560975609756,
"grad_norm": 1.9517488407833354,
"learning_rate": 1.0752701055496188e-05,
"loss": 0.0129,
"step": 561
},
{
"epoch": 7.310569105691057,
"grad_norm": 1.9648390533798386,
"learning_rate": 1.0724348001617626e-05,
"loss": 0.0237,
"step": 562
},
{
"epoch": 7.323577235772357,
"grad_norm": 1.3295665523096223,
"learning_rate": 1.0695989092802914e-05,
"loss": 0.0146,
"step": 563
},
{
"epoch": 7.336585365853659,
"grad_norm": 0.8273172380539461,
"learning_rate": 1.0667624558278338e-05,
"loss": 0.0074,
"step": 564
},
{
"epoch": 7.349593495934959,
"grad_norm": 1.284860974532405,
"learning_rate": 1.0639254627315658e-05,
"loss": 0.0146,
"step": 565
},
{
"epoch": 7.36260162601626,
"grad_norm": 1.6309635277243517,
"learning_rate": 1.0610879529230242e-05,
"loss": 0.0169,
"step": 566
},
{
"epoch": 7.375609756097561,
"grad_norm": 1.9766783501257195,
"learning_rate": 1.058249949337924e-05,
"loss": 0.0459,
"step": 567
},
{
"epoch": 7.388617886178862,
"grad_norm": 0.9502632800397175,
"learning_rate": 1.05541147491597e-05,
"loss": 0.007,
"step": 568
},
{
"epoch": 7.401626016260162,
"grad_norm": 1.9982959250199734,
"learning_rate": 1.0525725526006738e-05,
"loss": 0.0201,
"step": 569
},
{
"epoch": 7.414634146341464,
"grad_norm": 2.0754641095590722,
"learning_rate": 1.049733205339167e-05,
"loss": 0.0186,
"step": 570
},
{
"epoch": 7.427642276422764,
"grad_norm": 2.0848531141884425,
"learning_rate": 1.0468934560820157e-05,
"loss": 0.0172,
"step": 571
},
{
"epoch": 7.440650406504065,
"grad_norm": 2.251271110215539,
"learning_rate": 1.0440533277830355e-05,
"loss": 0.0119,
"step": 572
},
{
"epoch": 7.453658536585366,
"grad_norm": 1.4639038307850378,
"learning_rate": 1.0412128433991064e-05,
"loss": 0.0123,
"step": 573
},
{
"epoch": 7.466666666666667,
"grad_norm": 1.2110922205359993,
"learning_rate": 1.0383720258899864e-05,
"loss": 0.0153,
"step": 574
},
{
"epoch": 7.479674796747967,
"grad_norm": 3.4877321355015356,
"learning_rate": 1.0355308982181254e-05,
"loss": 0.0278,
"step": 575
},
{
"epoch": 7.492682926829268,
"grad_norm": 0.7032740951581019,
"learning_rate": 1.032689483348481e-05,
"loss": 0.0075,
"step": 576
},
{
"epoch": 7.505691056910569,
"grad_norm": 0.8684769334973417,
"learning_rate": 1.0298478042483327e-05,
"loss": 0.0095,
"step": 577
},
{
"epoch": 7.51869918699187,
"grad_norm": 2.423271319680519,
"learning_rate": 1.0270058838870948e-05,
"loss": 0.0227,
"step": 578
},
{
"epoch": 7.53170731707317,
"grad_norm": 1.3407617488265873,
"learning_rate": 1.0241637452361323e-05,
"loss": 0.0115,
"step": 579
},
{
"epoch": 7.544715447154472,
"grad_norm": 1.207276254144989,
"learning_rate": 1.0213214112685747e-05,
"loss": 0.0141,
"step": 580
},
{
"epoch": 7.557723577235772,
"grad_norm": 0.5055444072299614,
"learning_rate": 1.01847890495913e-05,
"loss": 0.0043,
"step": 581
},
{
"epoch": 7.570731707317073,
"grad_norm": 1.7627894585314987,
"learning_rate": 1.0156362492838991e-05,
"loss": 0.0171,
"step": 582
},
{
"epoch": 7.583739837398374,
"grad_norm": 2.359898057520564,
"learning_rate": 1.012793467220191e-05,
"loss": 0.0182,
"step": 583
},
{
"epoch": 7.596747967479675,
"grad_norm": 3.3346044899148204,
"learning_rate": 1.0099505817463351e-05,
"loss": 0.0327,
"step": 584
},
{
"epoch": 7.609756097560975,
"grad_norm": 4.237874349630483,
"learning_rate": 1.0071076158414977e-05,
"loss": 0.0549,
"step": 585
},
{
"epoch": 7.622764227642277,
"grad_norm": 3.816331876941365,
"learning_rate": 1.004264592485495e-05,
"loss": 0.0485,
"step": 586
},
{
"epoch": 7.635772357723577,
"grad_norm": 1.6227184769502474,
"learning_rate": 1.001421534658607e-05,
"loss": 0.0089,
"step": 587
},
{
"epoch": 7.648780487804878,
"grad_norm": 2.299066537768556,
"learning_rate": 9.985784653413931e-06,
"loss": 0.0346,
"step": 588
},
{
"epoch": 7.661788617886179,
"grad_norm": 1.1559078136154577,
"learning_rate": 9.957354075145053e-06,
"loss": 0.0106,
"step": 589
},
{
"epoch": 7.67479674796748,
"grad_norm": 1.1595226487375585,
"learning_rate": 9.928923841585025e-06,
"loss": 0.0108,
"step": 590
},
{
"epoch": 7.68780487804878,
"grad_norm": 1.0757704349557082,
"learning_rate": 9.900494182536652e-06,
"loss": 0.0095,
"step": 591
},
{
"epoch": 7.700813008130082,
"grad_norm": 4.099082054983232,
"learning_rate": 9.872065327798092e-06,
"loss": 0.0486,
"step": 592
},
{
"epoch": 7.713821138211382,
"grad_norm": 2.292496055927853,
"learning_rate": 9.84363750716101e-06,
"loss": 0.0235,
"step": 593
},
{
"epoch": 7.726829268292683,
"grad_norm": 1.9178358301562846,
"learning_rate": 9.815210950408703e-06,
"loss": 0.0168,
"step": 594
},
{
"epoch": 7.739837398373984,
"grad_norm": 1.7570666578565255,
"learning_rate": 9.786785887314255e-06,
"loss": 0.0216,
"step": 595
},
{
"epoch": 7.752845528455285,
"grad_norm": 2.906135856571168,
"learning_rate": 9.75836254763868e-06,
"loss": 0.036,
"step": 596
},
{
"epoch": 7.765853658536585,
"grad_norm": 0.6102186666671133,
"learning_rate": 9.729941161129055e-06,
"loss": 0.0052,
"step": 597
},
{
"epoch": 7.778861788617887,
"grad_norm": 2.006642525644417,
"learning_rate": 9.701521957516677e-06,
"loss": 0.0354,
"step": 598
},
{
"epoch": 7.791869918699187,
"grad_norm": 1.6112202056054103,
"learning_rate": 9.673105166515194e-06,
"loss": 0.021,
"step": 599
},
{
"epoch": 7.804878048780488,
"grad_norm": 1.28486396638533,
"learning_rate": 9.644691017818752e-06,
"loss": 0.0113,
"step": 600
},
{
"epoch": 7.897560975609756,
"grad_norm": 1.1570836064474173,
"learning_rate": 9.616279741100141e-06,
"loss": 0.0163,
"step": 601
},
{
"epoch": 7.9105691056910565,
"grad_norm": 4.3926195896460705,
"learning_rate": 9.587871566008941e-06,
"loss": 0.0372,
"step": 602
},
{
"epoch": 7.923577235772358,
"grad_norm": 1.8374516838829014,
"learning_rate": 9.55946672216965e-06,
"loss": 0.0152,
"step": 603
},
{
"epoch": 7.9365853658536585,
"grad_norm": 1.2538360344208235,
"learning_rate": 9.53106543917985e-06,
"loss": 0.0079,
"step": 604
},
{
"epoch": 7.949593495934959,
"grad_norm": 1.6551949026862187,
"learning_rate": 9.502667946608332e-06,
"loss": 0.022,
"step": 605
},
{
"epoch": 7.96260162601626,
"grad_norm": 1.2298212750460404,
"learning_rate": 9.474274473993263e-06,
"loss": 0.0144,
"step": 606
},
{
"epoch": 7.975609756097561,
"grad_norm": 0.5496419989314434,
"learning_rate": 9.445885250840301e-06,
"loss": 0.0054,
"step": 607
},
{
"epoch": 7.9886178861788615,
"grad_norm": 1.3353474096544538,
"learning_rate": 9.41750050662076e-06,
"loss": 0.0112,
"step": 608
},
{
"epoch": 8.001626016260163,
"grad_norm": 1.1581286863052545,
"learning_rate": 9.389120470769758e-06,
"loss": 0.0091,
"step": 609
},
{
"epoch": 8.014634146341463,
"grad_norm": 1.696248770025462,
"learning_rate": 9.360745372684346e-06,
"loss": 0.0232,
"step": 610
},
{
"epoch": 8.027642276422764,
"grad_norm": 0.9635336545656762,
"learning_rate": 9.332375441721664e-06,
"loss": 0.0125,
"step": 611
},
{
"epoch": 8.040650406504065,
"grad_norm": 1.7995568016097954,
"learning_rate": 9.304010907197088e-06,
"loss": 0.0152,
"step": 612
},
{
"epoch": 8.053658536585365,
"grad_norm": 0.5218744745869878,
"learning_rate": 9.275651998382377e-06,
"loss": 0.0037,
"step": 613
},
{
"epoch": 8.066666666666666,
"grad_norm": 1.7593678574145648,
"learning_rate": 9.247298944503813e-06,
"loss": 0.0144,
"step": 614
},
{
"epoch": 8.079674796747968,
"grad_norm": 1.0242522857715741,
"learning_rate": 9.218951974740354e-06,
"loss": 0.0058,
"step": 615
},
{
"epoch": 8.092682926829267,
"grad_norm": 0.804026841919947,
"learning_rate": 9.190611318221779e-06,
"loss": 0.0043,
"step": 616
},
{
"epoch": 8.105691056910569,
"grad_norm": 0.8639328680213971,
"learning_rate": 9.162277204026841e-06,
"loss": 0.009,
"step": 617
},
{
"epoch": 8.11869918699187,
"grad_norm": 0.6692064656778831,
"learning_rate": 9.1339498611814e-06,
"loss": 0.0059,
"step": 618
},
{
"epoch": 8.13170731707317,
"grad_norm": 1.3302886569969412,
"learning_rate": 9.105629518656597e-06,
"loss": 0.008,
"step": 619
},
{
"epoch": 8.144715447154471,
"grad_norm": 2.7029834067969,
"learning_rate": 9.07731640536698e-06,
"loss": 0.0087,
"step": 620
},
{
"epoch": 8.157723577235773,
"grad_norm": 0.5950805638509996,
"learning_rate": 9.049010750168667e-06,
"loss": 0.005,
"step": 621
},
{
"epoch": 8.170731707317072,
"grad_norm": 1.473020833784232,
"learning_rate": 9.020712781857485e-06,
"loss": 0.0093,
"step": 622
},
{
"epoch": 8.183739837398374,
"grad_norm": 1.323143857293457,
"learning_rate": 8.99242272916714e-06,
"loss": 0.0077,
"step": 623
},
{
"epoch": 8.196747967479675,
"grad_norm": 0.6013128053864051,
"learning_rate": 8.96414082076734e-06,
"loss": 0.0055,
"step": 624
},
{
"epoch": 8.209756097560975,
"grad_norm": 0.6933361341239108,
"learning_rate": 8.935867285261977e-06,
"loss": 0.0038,
"step": 625
},
{
"epoch": 8.222764227642276,
"grad_norm": 0.7643716646547513,
"learning_rate": 8.90760235118726e-06,
"loss": 0.0059,
"step": 626
},
{
"epoch": 8.235772357723578,
"grad_norm": 1.6083959007721669,
"learning_rate": 8.879346247009862e-06,
"loss": 0.0183,
"step": 627
},
{
"epoch": 8.248780487804877,
"grad_norm": 1.7481203666565401,
"learning_rate": 8.851099201125098e-06,
"loss": 0.0124,
"step": 628
},
{
"epoch": 8.261788617886179,
"grad_norm": 2.979391453099003,
"learning_rate": 8.822861441855062e-06,
"loss": 0.0472,
"step": 629
},
{
"epoch": 8.27479674796748,
"grad_norm": 1.5205590984667567,
"learning_rate": 8.79463319744677e-06,
"loss": 0.012,
"step": 630
},
{
"epoch": 8.28780487804878,
"grad_norm": 0.45025282821354934,
"learning_rate": 8.76641469607035e-06,
"loss": 0.0032,
"step": 631
},
{
"epoch": 8.300813008130081,
"grad_norm": 1.6291831313222467,
"learning_rate": 8.738206165817162e-06,
"loss": 0.0174,
"step": 632
},
{
"epoch": 8.313821138211383,
"grad_norm": 1.8672390073053273,
"learning_rate": 8.71000783469797e-06,
"loss": 0.0161,
"step": 633
},
{
"epoch": 8.326829268292682,
"grad_norm": 0.6624522215545257,
"learning_rate": 8.681819930641104e-06,
"loss": 0.0047,
"step": 634
},
{
"epoch": 8.339837398373984,
"grad_norm": 1.2872295772729532,
"learning_rate": 8.653642681490608e-06,
"loss": 0.0068,
"step": 635
},
{
"epoch": 8.352845528455285,
"grad_norm": 1.9001799919981712,
"learning_rate": 8.625476315004406e-06,
"loss": 0.0115,
"step": 636
},
{
"epoch": 8.365853658536585,
"grad_norm": 1.1211077846124613,
"learning_rate": 8.597321058852446e-06,
"loss": 0.0078,
"step": 637
},
{
"epoch": 8.378861788617886,
"grad_norm": 0.5257992324029892,
"learning_rate": 8.569177140614884e-06,
"loss": 0.0033,
"step": 638
},
{
"epoch": 8.391869918699188,
"grad_norm": 0.9500921805600161,
"learning_rate": 8.541044787780223e-06,
"loss": 0.0121,
"step": 639
},
{
"epoch": 8.404878048780487,
"grad_norm": 1.590484580117083,
"learning_rate": 8.512924227743482e-06,
"loss": 0.0183,
"step": 640
},
{
"epoch": 8.417886178861789,
"grad_norm": 1.3523870862348597,
"learning_rate": 8.48481568780437e-06,
"loss": 0.0059,
"step": 641
},
{
"epoch": 8.43089430894309,
"grad_norm": 0.3643386165943172,
"learning_rate": 8.45671939516542e-06,
"loss": 0.0027,
"step": 642
},
{
"epoch": 8.44390243902439,
"grad_norm": 0.4038839717069236,
"learning_rate": 8.42863557693018e-06,
"loss": 0.0033,
"step": 643
},
{
"epoch": 8.456910569105691,
"grad_norm": 4.872152725527478,
"learning_rate": 8.400564460101365e-06,
"loss": 0.0064,
"step": 644
},
{
"epoch": 8.469918699186993,
"grad_norm": 1.566250126609204,
"learning_rate": 8.372506271579022e-06,
"loss": 0.0155,
"step": 645
},
{
"epoch": 8.482926829268292,
"grad_norm": 1.4695419969482413,
"learning_rate": 8.3444612381587e-06,
"loss": 0.0181,
"step": 646
},
{
"epoch": 8.495934959349594,
"grad_norm": 0.6092218830403479,
"learning_rate": 8.316429586529616e-06,
"loss": 0.004,
"step": 647
},
{
"epoch": 8.508943089430895,
"grad_norm": 0.6271026393173923,
"learning_rate": 8.288411543272814e-06,
"loss": 0.004,
"step": 648
},
{
"epoch": 8.521951219512195,
"grad_norm": 0.45814933354877085,
"learning_rate": 8.260407334859352e-06,
"loss": 0.0032,
"step": 649
},
{
"epoch": 8.534959349593496,
"grad_norm": 0.5630633751133635,
"learning_rate": 8.232417187648454e-06,
"loss": 0.0033,
"step": 650
},
{
"epoch": 8.547967479674798,
"grad_norm": 2.1864551163075236,
"learning_rate": 8.204441327885682e-06,
"loss": 0.0202,
"step": 651
},
{
"epoch": 8.560975609756097,
"grad_norm": 1.6493473678926696,
"learning_rate": 8.176479981701124e-06,
"loss": 0.0062,
"step": 652
},
{
"epoch": 8.573983739837399,
"grad_norm": 1.5925655533524505,
"learning_rate": 8.148533375107547e-06,
"loss": 0.0125,
"step": 653
},
{
"epoch": 8.5869918699187,
"grad_norm": 0.7658342201667933,
"learning_rate": 8.120601733998573e-06,
"loss": 0.0037,
"step": 654
},
{
"epoch": 8.6,
"grad_norm": 0.6802727123664065,
"learning_rate": 8.092685284146865e-06,
"loss": 0.0058,
"step": 655
},
{
"epoch": 8.613008130081301,
"grad_norm": 0.6428487324704795,
"learning_rate": 8.064784251202295e-06,
"loss": 0.0034,
"step": 656
},
{
"epoch": 8.6260162601626,
"grad_norm": 1.2283433307632814,
"learning_rate": 8.036898860690109e-06,
"loss": 0.0078,
"step": 657
},
{
"epoch": 8.639024390243902,
"grad_norm": 1.017938353807125,
"learning_rate": 8.009029338009124e-06,
"loss": 0.0047,
"step": 658
},
{
"epoch": 8.652032520325204,
"grad_norm": 0.7001890029191871,
"learning_rate": 7.9811759084299e-06,
"loss": 0.0036,
"step": 659
},
{
"epoch": 8.665040650406503,
"grad_norm": 0.633347645128458,
"learning_rate": 7.953338797092902e-06,
"loss": 0.0046,
"step": 660
},
{
"epoch": 8.678048780487805,
"grad_norm": 1.2025605383952795,
"learning_rate": 7.925518229006706e-06,
"loss": 0.0093,
"step": 661
},
{
"epoch": 8.691056910569106,
"grad_norm": 2.633448780438676,
"learning_rate": 7.897714429046161e-06,
"loss": 0.0146,
"step": 662
},
{
"epoch": 8.704065040650406,
"grad_norm": 0.5596034491248052,
"learning_rate": 7.869927621950589e-06,
"loss": 0.0028,
"step": 663
},
{
"epoch": 8.717073170731707,
"grad_norm": 1.157553594361386,
"learning_rate": 7.84215803232194e-06,
"loss": 0.008,
"step": 664
},
{
"epoch": 8.730081300813008,
"grad_norm": 0.9018826372017152,
"learning_rate": 7.814405884623012e-06,
"loss": 0.0044,
"step": 665
},
{
"epoch": 8.743089430894308,
"grad_norm": 0.23290196917996137,
"learning_rate": 7.786671403175613e-06,
"loss": 0.0012,
"step": 666
},
{
"epoch": 8.75609756097561,
"grad_norm": 0.09872361493258511,
"learning_rate": 7.758954812158748e-06,
"loss": 0.0007,
"step": 667
},
{
"epoch": 8.769105691056911,
"grad_norm": 2.416464152136664,
"learning_rate": 7.73125633560682e-06,
"loss": 0.0334,
"step": 668
},
{
"epoch": 8.78211382113821,
"grad_norm": 1.8383531775741038,
"learning_rate": 7.703576197407814e-06,
"loss": 0.0129,
"step": 669
},
{
"epoch": 8.795121951219512,
"grad_norm": 0.5846243555775849,
"learning_rate": 7.675914621301476e-06,
"loss": 0.0046,
"step": 670
},
{
"epoch": 8.808130081300813,
"grad_norm": 0.20276419203159612,
"learning_rate": 7.64827183087752e-06,
"loss": 0.0013,
"step": 671
},
{
"epoch": 8.821138211382113,
"grad_norm": 2.250697908537039,
"learning_rate": 7.620648049573815e-06,
"loss": 0.0198,
"step": 672
},
{
"epoch": 8.834146341463414,
"grad_norm": 2.5506639659880697,
"learning_rate": 7.593043500674576e-06,
"loss": 0.0335,
"step": 673
},
{
"epoch": 8.847154471544716,
"grad_norm": 0.1760344339783254,
"learning_rate": 7.565458407308562e-06,
"loss": 0.0013,
"step": 674
},
{
"epoch": 8.860162601626016,
"grad_norm": 0.9878490804168167,
"learning_rate": 7.5378929924472735e-06,
"loss": 0.0062,
"step": 675
},
{
"epoch": 8.873170731707317,
"grad_norm": 1.2003584506062117,
"learning_rate": 7.51034747890315e-06,
"loss": 0.0132,
"step": 676
},
{
"epoch": 8.886178861788618,
"grad_norm": 0.6698985442501936,
"learning_rate": 7.482822089327766e-06,
"loss": 0.004,
"step": 677
},
{
"epoch": 8.899186991869918,
"grad_norm": 1.8608580951848903,
"learning_rate": 7.455317046210029e-06,
"loss": 0.0152,
"step": 678
},
{
"epoch": 8.91219512195122,
"grad_norm": 1.955648048483486,
"learning_rate": 7.427832571874391e-06,
"loss": 0.009,
"step": 679
},
{
"epoch": 8.92520325203252,
"grad_norm": 0.6522291760736664,
"learning_rate": 7.400368888479048e-06,
"loss": 0.0026,
"step": 680
},
{
"epoch": 8.93821138211382,
"grad_norm": 1.8126549078970746,
"learning_rate": 7.372926218014131e-06,
"loss": 0.0112,
"step": 681
},
{
"epoch": 8.951219512195122,
"grad_norm": 3.7519826805559906,
"learning_rate": 7.345504782299933e-06,
"loss": 0.039,
"step": 682
},
{
"epoch": 8.964227642276423,
"grad_norm": 2.1405391277026395,
"learning_rate": 7.318104802985102e-06,
"loss": 0.0029,
"step": 683
},
{
"epoch": 8.977235772357723,
"grad_norm": 3.2663905500123995,
"learning_rate": 7.29072650154485e-06,
"loss": 0.0133,
"step": 684
},
{
"epoch": 8.990243902439024,
"grad_norm": 1.4235721126316785,
"learning_rate": 7.263370099279173e-06,
"loss": 0.0075,
"step": 685
},
{
"epoch": 9.003252032520326,
"grad_norm": 1.5535694922192906,
"learning_rate": 7.236035817311047e-06,
"loss": 0.0094,
"step": 686
},
{
"epoch": 9.016260162601625,
"grad_norm": 0.2627479940056018,
"learning_rate": 7.208723876584654e-06,
"loss": 0.0017,
"step": 687
},
{
"epoch": 9.029268292682927,
"grad_norm": 0.4520005178424521,
"learning_rate": 7.181434497863589e-06,
"loss": 0.0019,
"step": 688
},
{
"epoch": 9.042276422764228,
"grad_norm": 0.17942256470122298,
"learning_rate": 7.154167901729078e-06,
"loss": 0.0014,
"step": 689
},
{
"epoch": 9.055284552845528,
"grad_norm": 0.27697334188523803,
"learning_rate": 7.126924308578196e-06,
"loss": 0.002,
"step": 690
},
{
"epoch": 9.06829268292683,
"grad_norm": 0.34183068785785425,
"learning_rate": 7.099703938622076e-06,
"loss": 0.0018,
"step": 691
},
{
"epoch": 9.08130081300813,
"grad_norm": 1.3803401856075173,
"learning_rate": 7.072507011884146e-06,
"loss": 0.0118,
"step": 692
},
{
"epoch": 9.09430894308943,
"grad_norm": 1.3568397929755587,
"learning_rate": 7.045333748198342e-06,
"loss": 0.006,
"step": 693
},
{
"epoch": 9.107317073170732,
"grad_norm": 0.38380133051188836,
"learning_rate": 7.0181843672073195e-06,
"loss": 0.0027,
"step": 694
},
{
"epoch": 9.120325203252033,
"grad_norm": 0.33699787698320505,
"learning_rate": 6.9910590883607e-06,
"loss": 0.0028,
"step": 695
},
{
"epoch": 9.133333333333333,
"grad_norm": 0.9219452083351827,
"learning_rate": 6.963958130913285e-06,
"loss": 0.0038,
"step": 696
},
{
"epoch": 9.146341463414634,
"grad_norm": 0.22872769458941739,
"learning_rate": 6.936881713923278e-06,
"loss": 0.0016,
"step": 697
},
{
"epoch": 9.159349593495936,
"grad_norm": 1.8659145036422158,
"learning_rate": 6.909830056250527e-06,
"loss": 0.0073,
"step": 698
},
{
"epoch": 9.172357723577235,
"grad_norm": 0.8714505646577918,
"learning_rate": 6.882803376554752e-06,
"loss": 0.0041,
"step": 699
},
{
"epoch": 9.185365853658537,
"grad_norm": 3.7669183636793786,
"learning_rate": 6.855801893293765e-06,
"loss": 0.0574,
"step": 700
},
{
"epoch": 9.198373983739838,
"grad_norm": 1.8011229635123818,
"learning_rate": 6.828825824721723e-06,
"loss": 0.0073,
"step": 701
},
{
"epoch": 9.211382113821138,
"grad_norm": 1.1367034179206732,
"learning_rate": 6.801875388887356e-06,
"loss": 0.0073,
"step": 702
},
{
"epoch": 9.22439024390244,
"grad_norm": 0.3445914797421055,
"learning_rate": 6.774950803632192e-06,
"loss": 0.002,
"step": 703
},
{
"epoch": 9.23739837398374,
"grad_norm": 0.3772220820545059,
"learning_rate": 6.74805228658882e-06,
"loss": 0.0019,
"step": 704
},
{
"epoch": 9.25040650406504,
"grad_norm": 0.6069595408777112,
"learning_rate": 6.721180055179113e-06,
"loss": 0.0036,
"step": 705
},
{
"epoch": 9.263414634146342,
"grad_norm": 0.18806254090946373,
"learning_rate": 6.694334326612475e-06,
"loss": 0.0014,
"step": 706
},
{
"epoch": 9.276422764227643,
"grad_norm": 0.6835511302578737,
"learning_rate": 6.66751531788409e-06,
"loss": 0.0029,
"step": 707
},
{
"epoch": 9.289430894308943,
"grad_norm": 0.7556461894159618,
"learning_rate": 6.640723245773162e-06,
"loss": 0.004,
"step": 708
},
{
"epoch": 9.302439024390244,
"grad_norm": 0.4976813766777833,
"learning_rate": 6.613958326841163e-06,
"loss": 0.0021,
"step": 709
},
{
"epoch": 9.315447154471546,
"grad_norm": 0.19880631922304534,
"learning_rate": 6.587220777430097e-06,
"loss": 0.0013,
"step": 710
},
{
"epoch": 9.328455284552845,
"grad_norm": 0.5526404773632339,
"learning_rate": 6.560510813660719e-06,
"loss": 0.0024,
"step": 711
},
{
"epoch": 9.341463414634147,
"grad_norm": 0.3618818809888966,
"learning_rate": 6.533828651430823e-06,
"loss": 0.0022,
"step": 712
},
{
"epoch": 9.354471544715448,
"grad_norm": 0.13968931300117193,
"learning_rate": 6.507174506413476e-06,
"loss": 0.0011,
"step": 713
},
{
"epoch": 9.367479674796748,
"grad_norm": 0.545676069641217,
"learning_rate": 6.480548594055285e-06,
"loss": 0.0028,
"step": 714
},
{
"epoch": 9.38048780487805,
"grad_norm": 0.651992152152611,
"learning_rate": 6.453951129574644e-06,
"loss": 0.0037,
"step": 715
},
{
"epoch": 9.393495934959349,
"grad_norm": 0.08047968696500124,
"learning_rate": 6.427382327960008e-06,
"loss": 0.0006,
"step": 716
},
{
"epoch": 9.40650406504065,
"grad_norm": 2.505129451573409,
"learning_rate": 6.400842403968148e-06,
"loss": 0.0133,
"step": 717
},
{
"epoch": 9.419512195121952,
"grad_norm": 1.971414547082271,
"learning_rate": 6.374331572122413e-06,
"loss": 0.0137,
"step": 718
},
{
"epoch": 9.432520325203251,
"grad_norm": 0.08388839040822257,
"learning_rate": 6.3478500467109995e-06,
"loss": 0.0007,
"step": 719
},
{
"epoch": 9.445528455284553,
"grad_norm": 0.14957469944874793,
"learning_rate": 6.321398041785225e-06,
"loss": 0.0009,
"step": 720
},
{
"epoch": 9.458536585365854,
"grad_norm": 0.2293748761638421,
"learning_rate": 6.294975771157779e-06,
"loss": 0.002,
"step": 721
},
{
"epoch": 9.471544715447154,
"grad_norm": 1.1348199558360812,
"learning_rate": 6.26858344840102e-06,
"loss": 0.0033,
"step": 722
},
{
"epoch": 9.484552845528455,
"grad_norm": 0.17444247050438766,
"learning_rate": 6.24222128684523e-06,
"loss": 0.0011,
"step": 723
},
{
"epoch": 9.497560975609757,
"grad_norm": 1.469322104576111,
"learning_rate": 6.215889499576898e-06,
"loss": 0.0041,
"step": 724
},
{
"epoch": 9.510569105691056,
"grad_norm": 2.4640290623372456,
"learning_rate": 6.189588299436997e-06,
"loss": 0.0057,
"step": 725
},
{
"epoch": 9.523577235772358,
"grad_norm": 0.3551828514072661,
"learning_rate": 6.163317899019263e-06,
"loss": 0.0017,
"step": 726
},
{
"epoch": 9.536585365853659,
"grad_norm": 1.9778045389994514,
"learning_rate": 6.137078510668475e-06,
"loss": 0.0054,
"step": 727
},
{
"epoch": 9.549593495934959,
"grad_norm": 0.1988055551588179,
"learning_rate": 6.11087034647874e-06,
"loss": 0.0016,
"step": 728
},
{
"epoch": 9.56260162601626,
"grad_norm": 1.1742874707944437,
"learning_rate": 6.084693618291784e-06,
"loss": 0.0053,
"step": 729
},
{
"epoch": 9.575609756097561,
"grad_norm": 0.5141180298405635,
"learning_rate": 6.058548537695225e-06,
"loss": 0.0023,
"step": 730
},
{
"epoch": 9.588617886178861,
"grad_norm": 0.3974291589148598,
"learning_rate": 6.032435316020879e-06,
"loss": 0.0031,
"step": 731
},
{
"epoch": 9.601626016260163,
"grad_norm": 0.144069333032306,
"learning_rate": 6.006354164343047e-06,
"loss": 0.001,
"step": 732
},
{
"epoch": 9.614634146341464,
"grad_norm": 0.0854524512610507,
"learning_rate": 5.980305293476798e-06,
"loss": 0.0007,
"step": 733
},
{
"epoch": 9.627642276422764,
"grad_norm": 1.0676606151474946,
"learning_rate": 5.954288913976282e-06,
"loss": 0.0049,
"step": 734
},
{
"epoch": 9.640650406504065,
"grad_norm": 0.22481585335882875,
"learning_rate": 5.928305236133016e-06,
"loss": 0.0014,
"step": 735
},
{
"epoch": 9.653658536585366,
"grad_norm": 1.5806927451491446,
"learning_rate": 5.9023544699741916e-06,
"loss": 0.0074,
"step": 736
},
{
"epoch": 9.666666666666666,
"grad_norm": 3.162133500470142,
"learning_rate": 5.876436825260967e-06,
"loss": 0.0236,
"step": 737
},
{
"epoch": 9.679674796747967,
"grad_norm": 0.6497244678275477,
"learning_rate": 5.8505525114867845e-06,
"loss": 0.0023,
"step": 738
},
{
"epoch": 9.692682926829269,
"grad_norm": 0.20942650687885297,
"learning_rate": 5.824701737875671e-06,
"loss": 0.0015,
"step": 739
},
{
"epoch": 9.705691056910569,
"grad_norm": 0.21783176506446844,
"learning_rate": 5.798884713380542e-06,
"loss": 0.0009,
"step": 740
},
{
"epoch": 9.71869918699187,
"grad_norm": 1.3397879966577473,
"learning_rate": 5.773101646681519e-06,
"loss": 0.0053,
"step": 741
},
{
"epoch": 9.731707317073171,
"grad_norm": 0.5629435457967111,
"learning_rate": 5.747352746184246e-06,
"loss": 0.0029,
"step": 742
},
{
"epoch": 9.744715447154471,
"grad_norm": 0.2655804464635928,
"learning_rate": 5.721638220018195e-06,
"loss": 0.0015,
"step": 743
},
{
"epoch": 9.757723577235772,
"grad_norm": 1.2625485903808695,
"learning_rate": 5.695958276034992e-06,
"loss": 0.0109,
"step": 744
},
{
"epoch": 9.770731707317074,
"grad_norm": 0.6053445135651991,
"learning_rate": 5.67031312180673e-06,
"loss": 0.0018,
"step": 745
},
{
"epoch": 9.783739837398373,
"grad_norm": 0.2216958815586712,
"learning_rate": 5.644702964624298e-06,
"loss": 0.0014,
"step": 746
},
{
"epoch": 9.796747967479675,
"grad_norm": 0.5151779928761385,
"learning_rate": 5.6191280114957e-06,
"loss": 0.0017,
"step": 747
},
{
"epoch": 9.809756097560976,
"grad_norm": 0.6574952728794218,
"learning_rate": 5.593588469144393e-06,
"loss": 0.0061,
"step": 748
},
{
"epoch": 9.822764227642276,
"grad_norm": 0.5715758912209028,
"learning_rate": 5.5680845440075885e-06,
"loss": 0.0018,
"step": 749
},
{
"epoch": 9.835772357723577,
"grad_norm": 0.1623161803252056,
"learning_rate": 5.542616442234618e-06,
"loss": 0.001,
"step": 750
},
{
"epoch": 9.848780487804879,
"grad_norm": 0.9840006400935422,
"learning_rate": 5.517184369685249e-06,
"loss": 0.006,
"step": 751
},
{
"epoch": 9.861788617886178,
"grad_norm": 0.24112095169278702,
"learning_rate": 5.4917885319280174e-06,
"loss": 0.0011,
"step": 752
},
{
"epoch": 9.87479674796748,
"grad_norm": 0.0574912432392828,
"learning_rate": 5.46642913423858e-06,
"loss": 0.0004,
"step": 753
},
{
"epoch": 9.887804878048781,
"grad_norm": 0.39818352891783554,
"learning_rate": 5.441106381598038e-06,
"loss": 0.0018,
"step": 754
},
{
"epoch": 9.900813008130081,
"grad_norm": 0.4845619747401539,
"learning_rate": 5.415820478691301e-06,
"loss": 0.0026,
"step": 755
},
{
"epoch": 9.913821138211382,
"grad_norm": 0.1830833298630534,
"learning_rate": 5.390571629905404e-06,
"loss": 0.001,
"step": 756
},
{
"epoch": 9.926829268292684,
"grad_norm": 0.47093815573108905,
"learning_rate": 5.365360039327883e-06,
"loss": 0.0029,
"step": 757
},
{
"epoch": 9.939837398373983,
"grad_norm": 1.1293829121916676,
"learning_rate": 5.340185910745115e-06,
"loss": 0.0064,
"step": 758
},
{
"epoch": 9.952845528455285,
"grad_norm": 1.1535164398174766,
"learning_rate": 5.3150494476406655e-06,
"loss": 0.008,
"step": 759
},
{
"epoch": 9.965853658536584,
"grad_norm": 0.6455882175180273,
"learning_rate": 5.2899508531936526e-06,
"loss": 0.0044,
"step": 760
},
{
"epoch": 9.978861788617886,
"grad_norm": 0.13020549451493543,
"learning_rate": 5.2648903302771e-06,
"loss": 0.0009,
"step": 761
},
{
"epoch": 9.991869918699187,
"grad_norm": 1.3934610223710313,
"learning_rate": 5.2398680814562875e-06,
"loss": 0.01,
"step": 762
},
{
"epoch": 10.004878048780487,
"grad_norm": 0.47932911623345126,
"learning_rate": 5.214884308987136e-06,
"loss": 0.0019,
"step": 763
},
{
"epoch": 10.017886178861788,
"grad_norm": 0.11341961597130322,
"learning_rate": 5.189939214814558e-06,
"loss": 0.0008,
"step": 764
},
{
"epoch": 10.03089430894309,
"grad_norm": 0.07014317124050574,
"learning_rate": 5.165033000570825e-06,
"loss": 0.0006,
"step": 765
},
{
"epoch": 10.04390243902439,
"grad_norm": 0.4939383938541964,
"learning_rate": 5.14016586757394e-06,
"loss": 0.002,
"step": 766
},
{
"epoch": 10.05691056910569,
"grad_norm": 0.051143749930634086,
"learning_rate": 5.115338016826017e-06,
"loss": 0.0004,
"step": 767
},
{
"epoch": 10.069918699186992,
"grad_norm": 0.15061806975433353,
"learning_rate": 5.0905496490116355e-06,
"loss": 0.0008,
"step": 768
},
{
"epoch": 10.082926829268292,
"grad_norm": 0.05789693918615477,
"learning_rate": 5.065800964496248e-06,
"loss": 0.0005,
"step": 769
},
{
"epoch": 10.095934959349593,
"grad_norm": 0.1742458351231767,
"learning_rate": 5.041092163324537e-06,
"loss": 0.0013,
"step": 770
},
{
"epoch": 10.108943089430895,
"grad_norm": 0.061702963689786644,
"learning_rate": 5.01642344521881e-06,
"loss": 0.0006,
"step": 771
},
{
"epoch": 10.121951219512194,
"grad_norm": 0.23180841253300452,
"learning_rate": 4.99179500957738e-06,
"loss": 0.0014,
"step": 772
},
{
"epoch": 10.134959349593496,
"grad_norm": 4.190784176626806,
"learning_rate": 4.967207055472953e-06,
"loss": 0.0078,
"step": 773
},
{
"epoch": 10.147967479674797,
"grad_norm": 0.08044864398385289,
"learning_rate": 4.942659781651028e-06,
"loss": 0.0006,
"step": 774
},
{
"epoch": 10.160975609756097,
"grad_norm": 0.08134366410064657,
"learning_rate": 4.918153386528271e-06,
"loss": 0.0005,
"step": 775
},
{
"epoch": 10.173983739837398,
"grad_norm": 0.3386130404211274,
"learning_rate": 4.893688068190933e-06,
"loss": 0.0014,
"step": 776
},
{
"epoch": 10.1869918699187,
"grad_norm": 0.07694587965043274,
"learning_rate": 4.8692640243932385e-06,
"loss": 0.0006,
"step": 777
},
{
"epoch": 10.2,
"grad_norm": 0.20369709305033337,
"learning_rate": 4.844881452555785e-06,
"loss": 0.0008,
"step": 778
},
{
"epoch": 10.2130081300813,
"grad_norm": 0.2331731469796336,
"learning_rate": 4.820540549763962e-06,
"loss": 0.0011,
"step": 779
},
{
"epoch": 10.226016260162602,
"grad_norm": 0.050888921989665374,
"learning_rate": 4.7962415127663265e-06,
"loss": 0.0003,
"step": 780
},
{
"epoch": 10.239024390243902,
"grad_norm": 0.19440076973555925,
"learning_rate": 4.771984537973046e-06,
"loss": 0.0012,
"step": 781
},
{
"epoch": 10.252032520325203,
"grad_norm": 0.05850485970509135,
"learning_rate": 4.747769821454295e-06,
"loss": 0.0006,
"step": 782
},
{
"epoch": 10.265040650406505,
"grad_norm": 0.10448560046139084,
"learning_rate": 4.7235975589386715e-06,
"loss": 0.0009,
"step": 783
},
{
"epoch": 10.278048780487804,
"grad_norm": 0.0924812647057576,
"learning_rate": 4.6994679458116165e-06,
"loss": 0.0005,
"step": 784
},
{
"epoch": 10.291056910569106,
"grad_norm": 0.05976270109537895,
"learning_rate": 4.675381177113837e-06,
"loss": 0.0005,
"step": 785
},
{
"epoch": 10.304065040650407,
"grad_norm": 0.21102456712599046,
"learning_rate": 4.651337447539716e-06,
"loss": 0.0013,
"step": 786
},
{
"epoch": 10.317073170731707,
"grad_norm": 0.07346705031308397,
"learning_rate": 4.627336951435762e-06,
"loss": 0.0004,
"step": 787
},
{
"epoch": 10.330081300813008,
"grad_norm": 0.19520075710266213,
"learning_rate": 4.603379882799018e-06,
"loss": 0.001,
"step": 788
},
{
"epoch": 10.34308943089431,
"grad_norm": 0.05360207841255482,
"learning_rate": 4.579466435275506e-06,
"loss": 0.0004,
"step": 789
},
{
"epoch": 10.35609756097561,
"grad_norm": 0.30877901494761745,
"learning_rate": 4.555596802158653e-06,
"loss": 0.0012,
"step": 790
},
{
"epoch": 10.36910569105691,
"grad_norm": 0.4614215876073173,
"learning_rate": 4.531771176387737e-06,
"loss": 0.0015,
"step": 791
},
{
"epoch": 10.382113821138212,
"grad_norm": 0.04330375717864531,
"learning_rate": 4.507989750546311e-06,
"loss": 0.0004,
"step": 792
},
{
"epoch": 10.395121951219512,
"grad_norm": 0.06165414339063373,
"learning_rate": 4.484252716860671e-06,
"loss": 0.0004,
"step": 793
},
{
"epoch": 10.408130081300813,
"grad_norm": 0.09271709128689157,
"learning_rate": 4.460560267198283e-06,
"loss": 0.0006,
"step": 794
},
{
"epoch": 10.421138211382114,
"grad_norm": 0.04333394944073435,
"learning_rate": 4.436912593066241e-06,
"loss": 0.0003,
"step": 795
},
{
"epoch": 10.434146341463414,
"grad_norm": 0.11960246314871008,
"learning_rate": 4.4133098856097146e-06,
"loss": 0.0008,
"step": 796
},
{
"epoch": 10.447154471544716,
"grad_norm": 0.13175061578921837,
"learning_rate": 4.389752335610405e-06,
"loss": 0.0008,
"step": 797
},
{
"epoch": 10.460162601626017,
"grad_norm": 0.05315970701379665,
"learning_rate": 4.366240133485012e-06,
"loss": 0.0004,
"step": 798
},
{
"epoch": 10.473170731707317,
"grad_norm": 0.4637700425973471,
"learning_rate": 4.342773469283671e-06,
"loss": 0.0014,
"step": 799
},
{
"epoch": 10.486178861788618,
"grad_norm": 0.04958968480470787,
"learning_rate": 4.319352532688444e-06,
"loss": 0.0004,
"step": 800
},
{
"epoch": 10.49918699186992,
"grad_norm": 0.38056287228121716,
"learning_rate": 4.295977513011772e-06,
"loss": 0.0016,
"step": 801
},
{
"epoch": 10.512195121951219,
"grad_norm": 0.1879844427210077,
"learning_rate": 4.272648599194948e-06,
"loss": 0.0009,
"step": 802
},
{
"epoch": 10.52520325203252,
"grad_norm": 0.30687686213960086,
"learning_rate": 4.24936597980659e-06,
"loss": 0.0014,
"step": 803
},
{
"epoch": 10.538211382113822,
"grad_norm": 1.042027841255737,
"learning_rate": 4.226129843041117e-06,
"loss": 0.0025,
"step": 804
},
{
"epoch": 10.551219512195122,
"grad_norm": 0.24226722224286953,
"learning_rate": 4.2029403767172175e-06,
"loss": 0.0008,
"step": 805
},
{
"epoch": 10.564227642276423,
"grad_norm": 0.029573196433914463,
"learning_rate": 4.1797977682763535e-06,
"loss": 0.0003,
"step": 806
},
{
"epoch": 10.577235772357724,
"grad_norm": 0.03207686584574384,
"learning_rate": 4.156702204781226e-06,
"loss": 0.0003,
"step": 807
},
{
"epoch": 10.590243902439024,
"grad_norm": 0.06776985466966881,
"learning_rate": 4.1336538729142716e-06,
"loss": 0.0005,
"step": 808
},
{
"epoch": 10.603252032520325,
"grad_norm": 0.4331598268082103,
"learning_rate": 4.110652958976151e-06,
"loss": 0.0023,
"step": 809
},
{
"epoch": 10.616260162601627,
"grad_norm": 0.12176586228909014,
"learning_rate": 4.087699648884248e-06,
"loss": 0.0006,
"step": 810
},
{
"epoch": 10.629268292682926,
"grad_norm": 0.023639997640290365,
"learning_rate": 4.06479412817115e-06,
"loss": 0.0002,
"step": 811
},
{
"epoch": 10.642276422764228,
"grad_norm": 1.0793003098116523,
"learning_rate": 4.041936581983171e-06,
"loss": 0.0031,
"step": 812
},
{
"epoch": 10.65528455284553,
"grad_norm": 0.050689185847761324,
"learning_rate": 4.019127195078848e-06,
"loss": 0.0005,
"step": 813
},
{
"epoch": 10.668292682926829,
"grad_norm": 0.06105866488827697,
"learning_rate": 3.996366151827438e-06,
"loss": 0.0004,
"step": 814
},
{
"epoch": 10.68130081300813,
"grad_norm": 0.09274757864473174,
"learning_rate": 3.973653636207437e-06,
"loss": 0.0004,
"step": 815
},
{
"epoch": 10.694308943089432,
"grad_norm": 0.30895592046615056,
"learning_rate": 3.950989831805083e-06,
"loss": 0.0009,
"step": 816
},
{
"epoch": 10.707317073170731,
"grad_norm": 0.036528951408162195,
"learning_rate": 3.9283749218128885e-06,
"loss": 0.0003,
"step": 817
},
{
"epoch": 10.720325203252033,
"grad_norm": 0.2523889547985841,
"learning_rate": 3.905809089028145e-06,
"loss": 0.0011,
"step": 818
},
{
"epoch": 10.733333333333333,
"grad_norm": 0.06255343477291914,
"learning_rate": 3.883292515851454e-06,
"loss": 0.0005,
"step": 819
},
{
"epoch": 10.746341463414634,
"grad_norm": 0.0523211695671239,
"learning_rate": 3.860825384285247e-06,
"loss": 0.0004,
"step": 820
},
{
"epoch": 10.759349593495935,
"grad_norm": 0.041601465054852405,
"learning_rate": 3.838407875932322e-06,
"loss": 0.0004,
"step": 821
},
{
"epoch": 10.772357723577235,
"grad_norm": 0.0801657959053447,
"learning_rate": 3.816040171994359e-06,
"loss": 0.0006,
"step": 822
},
{
"epoch": 10.785365853658536,
"grad_norm": 0.05331580296840854,
"learning_rate": 3.7937224532704752e-06,
"loss": 0.0004,
"step": 823
},
{
"epoch": 10.798373983739838,
"grad_norm": 0.12791635373473556,
"learning_rate": 3.7714549001557576e-06,
"loss": 0.0007,
"step": 824
},
{
"epoch": 10.811382113821137,
"grad_norm": 0.367029561233173,
"learning_rate": 3.7492376926397966e-06,
"loss": 0.0011,
"step": 825
},
{
"epoch": 10.824390243902439,
"grad_norm": 0.059589520409157146,
"learning_rate": 3.727071010305239e-06,
"loss": 0.0005,
"step": 826
},
{
"epoch": 10.83739837398374,
"grad_norm": 0.08847191690003295,
"learning_rate": 3.704955032326335e-06,
"loss": 0.0006,
"step": 827
},
{
"epoch": 10.85040650406504,
"grad_norm": 0.1688036545392985,
"learning_rate": 3.6828899374674933e-06,
"loss": 0.001,
"step": 828
},
{
"epoch": 10.863414634146341,
"grad_norm": 0.04553062700432191,
"learning_rate": 3.6608759040818177e-06,
"loss": 0.0003,
"step": 829
},
{
"epoch": 10.876422764227643,
"grad_norm": 0.10373692909530455,
"learning_rate": 3.6389131101096953e-06,
"loss": 0.0005,
"step": 830
},
{
"epoch": 10.889430894308942,
"grad_norm": 0.15740397668819048,
"learning_rate": 3.6170017330773376e-06,
"loss": 0.0007,
"step": 831
},
{
"epoch": 10.902439024390244,
"grad_norm": 0.09190872520558221,
"learning_rate": 3.595141950095349e-06,
"loss": 0.0006,
"step": 832
},
{
"epoch": 10.915447154471545,
"grad_norm": 0.06947890757526237,
"learning_rate": 3.5733339378572998e-06,
"loss": 0.0004,
"step": 833
},
{
"epoch": 10.928455284552845,
"grad_norm": 0.052252610519002735,
"learning_rate": 3.5515778726382967e-06,
"loss": 0.0004,
"step": 834
},
{
"epoch": 10.941463414634146,
"grad_norm": 0.9351651325004157,
"learning_rate": 3.529873930293546e-06,
"loss": 0.0207,
"step": 835
},
{
"epoch": 10.954471544715448,
"grad_norm": 0.07147932407374252,
"learning_rate": 3.508222286256955e-06,
"loss": 0.0005,
"step": 836
},
{
"epoch": 10.967479674796747,
"grad_norm": 1.4479000052444952,
"learning_rate": 3.4866231155396946e-06,
"loss": 0.0056,
"step": 837
},
{
"epoch": 10.980487804878049,
"grad_norm": 0.24001697191680987,
"learning_rate": 3.4650765927287955e-06,
"loss": 0.0007,
"step": 838
},
{
"epoch": 10.99349593495935,
"grad_norm": 0.7946933804387598,
"learning_rate": 3.4435828919857293e-06,
"loss": 0.0023,
"step": 839
},
{
"epoch": 11.00650406504065,
"grad_norm": 0.0583487914604126,
"learning_rate": 3.422142187045011e-06,
"loss": 0.0004,
"step": 840
},
{
"epoch": 11.019512195121951,
"grad_norm": 0.0692336991994657,
"learning_rate": 3.4007546512127764e-06,
"loss": 0.0005,
"step": 841
},
{
"epoch": 11.032520325203253,
"grad_norm": 0.05014142713723524,
"learning_rate": 3.3794204573654043e-06,
"loss": 0.0004,
"step": 842
},
{
"epoch": 11.045528455284552,
"grad_norm": 0.03386760470869016,
"learning_rate": 3.358139777948104e-06,
"loss": 0.0003,
"step": 843
},
{
"epoch": 11.058536585365854,
"grad_norm": 0.04672681758339113,
"learning_rate": 3.3369127849735237e-06,
"loss": 0.0003,
"step": 844
},
{
"epoch": 11.071544715447155,
"grad_norm": 0.049940267202782294,
"learning_rate": 3.3157396500203655e-06,
"loss": 0.0004,
"step": 845
},
{
"epoch": 11.084552845528455,
"grad_norm": 0.18806749291579217,
"learning_rate": 3.294620544231991e-06,
"loss": 0.0009,
"step": 846
},
{
"epoch": 11.097560975609756,
"grad_norm": 0.04899849394667562,
"learning_rate": 3.2735556383150447e-06,
"loss": 0.0003,
"step": 847
},
{
"epoch": 11.110569105691058,
"grad_norm": 0.04307550472460912,
"learning_rate": 3.2525451025380693e-06,
"loss": 0.0004,
"step": 848
},
{
"epoch": 11.123577235772357,
"grad_norm": 0.49388859118382206,
"learning_rate": 3.231589106730132e-06,
"loss": 0.0076,
"step": 849
},
{
"epoch": 11.136585365853659,
"grad_norm": 0.05069730872605736,
"learning_rate": 3.2106878202794513e-06,
"loss": 0.0004,
"step": 850
},
{
"epoch": 11.14959349593496,
"grad_norm": 0.057858205118389204,
"learning_rate": 3.1898414121320277e-06,
"loss": 0.0004,
"step": 851
},
{
"epoch": 11.16260162601626,
"grad_norm": 0.029023636208937914,
"learning_rate": 3.169050050790279e-06,
"loss": 0.0003,
"step": 852
},
{
"epoch": 11.175609756097561,
"grad_norm": 0.03195630089651303,
"learning_rate": 3.1483139043116705e-06,
"loss": 0.0003,
"step": 853
},
{
"epoch": 11.188617886178863,
"grad_norm": 0.04531455132918734,
"learning_rate": 3.1276331403073733e-06,
"loss": 0.0004,
"step": 854
},
{
"epoch": 11.201626016260162,
"grad_norm": 0.01844463580988382,
"learning_rate": 3.1070079259408934e-06,
"loss": 0.0002,
"step": 855
},
{
"epoch": 11.214634146341464,
"grad_norm": 0.05339672933260151,
"learning_rate": 3.086438427926729e-06,
"loss": 0.0003,
"step": 856
},
{
"epoch": 11.227642276422765,
"grad_norm": 0.049322797447503446,
"learning_rate": 3.06592481252902e-06,
"loss": 0.0002,
"step": 857
},
{
"epoch": 11.240650406504065,
"grad_norm": 0.020868257890228357,
"learning_rate": 3.0454672455602077e-06,
"loss": 0.0002,
"step": 858
},
{
"epoch": 11.253658536585366,
"grad_norm": 0.024943525917216745,
"learning_rate": 3.0250658923796817e-06,
"loss": 0.0002,
"step": 859
},
{
"epoch": 11.266666666666667,
"grad_norm": 0.035124702260310954,
"learning_rate": 3.004720917892464e-06,
"loss": 0.0003,
"step": 860
},
{
"epoch": 11.279674796747967,
"grad_norm": 0.04254706825809259,
"learning_rate": 2.98443248654786e-06,
"loss": 0.0004,
"step": 861
},
{
"epoch": 11.292682926829269,
"grad_norm": 0.024005278793993265,
"learning_rate": 2.9642007623381376e-06,
"loss": 0.0002,
"step": 862
},
{
"epoch": 11.305691056910568,
"grad_norm": 1.057501147418873,
"learning_rate": 2.9440259087971944e-06,
"loss": 0.0057,
"step": 863
},
{
"epoch": 11.31869918699187,
"grad_norm": 0.0299414357816384,
"learning_rate": 2.9239080889992454e-06,
"loss": 0.0002,
"step": 864
},
{
"epoch": 11.331707317073171,
"grad_norm": 0.03690924120021281,
"learning_rate": 2.90384746555749e-06,
"loss": 0.0004,
"step": 865
},
{
"epoch": 11.34471544715447,
"grad_norm": 0.05208938145471617,
"learning_rate": 2.883844200622816e-06,
"loss": 0.0004,
"step": 866
},
{
"epoch": 11.357723577235772,
"grad_norm": 0.290281887424443,
"learning_rate": 2.8638984558824777e-06,
"loss": 0.0009,
"step": 867
},
{
"epoch": 11.370731707317074,
"grad_norm": 0.04915729918173791,
"learning_rate": 2.8440103925587904e-06,
"loss": 0.0004,
"step": 868
},
{
"epoch": 11.383739837398373,
"grad_norm": 0.06593317390602493,
"learning_rate": 2.8241801714078254e-06,
"loss": 0.0004,
"step": 869
},
{
"epoch": 11.396747967479675,
"grad_norm": 0.034788808154885505,
"learning_rate": 2.804407952718119e-06,
"loss": 0.0003,
"step": 870
},
{
"epoch": 11.409756097560976,
"grad_norm": 0.05600817228855695,
"learning_rate": 2.7846938963093683e-06,
"loss": 0.0004,
"step": 871
},
{
"epoch": 11.422764227642276,
"grad_norm": 0.05937485095579203,
"learning_rate": 2.7650381615311384e-06,
"loss": 0.0005,
"step": 872
},
{
"epoch": 11.435772357723577,
"grad_norm": 0.025121660471457082,
"learning_rate": 2.745440907261582e-06,
"loss": 0.0002,
"step": 873
},
{
"epoch": 11.448780487804878,
"grad_norm": 0.027086682946642057,
"learning_rate": 2.7259022919061516e-06,
"loss": 0.0002,
"step": 874
},
{
"epoch": 11.461788617886178,
"grad_norm": 0.03781043440974685,
"learning_rate": 2.7064224733963197e-06,
"loss": 0.0003,
"step": 875
},
{
"epoch": 11.47479674796748,
"grad_norm": 0.03806746386020083,
"learning_rate": 2.6870016091882968e-06,
"loss": 0.0003,
"step": 876
},
{
"epoch": 11.487804878048781,
"grad_norm": 0.0698792772525063,
"learning_rate": 2.667639856261771e-06,
"loss": 0.0004,
"step": 877
},
{
"epoch": 11.50081300813008,
"grad_norm": 0.02308277789386836,
"learning_rate": 2.6483373711186213e-06,
"loss": 0.0002,
"step": 878
},
{
"epoch": 11.513821138211382,
"grad_norm": 0.058620780517023637,
"learning_rate": 2.629094309781669e-06,
"loss": 0.0004,
"step": 879
},
{
"epoch": 11.526829268292683,
"grad_norm": 0.04634404185503362,
"learning_rate": 2.6099108277934105e-06,
"loss": 0.0004,
"step": 880
},
{
"epoch": 11.539837398373983,
"grad_norm": 0.02593398850907055,
"learning_rate": 2.590787080214757e-06,
"loss": 0.0002,
"step": 881
},
{
"epoch": 11.552845528455284,
"grad_norm": 0.027323695595300873,
"learning_rate": 2.5717232216237865e-06,
"loss": 0.0002,
"step": 882
},
{
"epoch": 11.565853658536586,
"grad_norm": 0.027927908013963904,
"learning_rate": 2.55271940611449e-06,
"loss": 0.0002,
"step": 883
},
{
"epoch": 11.578861788617886,
"grad_norm": 0.20774017342640688,
"learning_rate": 2.5337757872955282e-06,
"loss": 0.0004,
"step": 884
},
{
"epoch": 11.591869918699187,
"grad_norm": 0.13864879096596824,
"learning_rate": 2.514892518288988e-06,
"loss": 0.0007,
"step": 885
},
{
"epoch": 11.604878048780488,
"grad_norm": 0.026076672691952213,
"learning_rate": 2.49606975172915e-06,
"loss": 0.0002,
"step": 886
},
{
"epoch": 11.617886178861788,
"grad_norm": 0.04436392211830201,
"learning_rate": 2.477307639761246e-06,
"loss": 0.0004,
"step": 887
},
{
"epoch": 11.63089430894309,
"grad_norm": 0.022320285463323698,
"learning_rate": 2.4586063340402412e-06,
"loss": 0.0002,
"step": 888
},
{
"epoch": 11.64390243902439,
"grad_norm": 0.031220022095961104,
"learning_rate": 2.43996598572959e-06,
"loss": 0.0003,
"step": 889
},
{
"epoch": 11.65691056910569,
"grad_norm": 0.06035507897095567,
"learning_rate": 2.421386745500034e-06,
"loss": 0.0004,
"step": 890
},
{
"epoch": 11.669918699186992,
"grad_norm": 0.03710983474696094,
"learning_rate": 2.4028687635283743e-06,
"loss": 0.0004,
"step": 891
},
{
"epoch": 11.682926829268293,
"grad_norm": 0.049833344321699204,
"learning_rate": 2.384412189496258e-06,
"loss": 0.0004,
"step": 892
},
{
"epoch": 11.695934959349593,
"grad_norm": 0.06906129660585766,
"learning_rate": 2.3660171725889703e-06,
"loss": 0.0006,
"step": 893
},
{
"epoch": 11.708943089430894,
"grad_norm": 0.057295409850243895,
"learning_rate": 2.347683861494228e-06,
"loss": 0.0003,
"step": 894
},
{
"epoch": 11.721951219512196,
"grad_norm": 0.04104143179661514,
"learning_rate": 2.329412404400969e-06,
"loss": 0.0003,
"step": 895
},
{
"epoch": 11.734959349593495,
"grad_norm": 0.030406462955230886,
"learning_rate": 2.3112029489981746e-06,
"loss": 0.0003,
"step": 896
},
{
"epoch": 11.747967479674797,
"grad_norm": 0.027529253931946367,
"learning_rate": 2.2930556424736574e-06,
"loss": 0.0003,
"step": 897
},
{
"epoch": 11.760975609756098,
"grad_norm": 0.024338381539098602,
"learning_rate": 2.274970631512878e-06,
"loss": 0.0002,
"step": 898
},
{
"epoch": 11.773983739837398,
"grad_norm": 0.05105180412521588,
"learning_rate": 2.2569480622977626e-06,
"loss": 0.0003,
"step": 899
},
{
"epoch": 11.7869918699187,
"grad_norm": 0.04170692352890064,
"learning_rate": 2.238988080505513e-06,
"loss": 0.0003,
"step": 900
},
{
"epoch": 11.8,
"grad_norm": 0.030129284415571365,
"learning_rate": 2.221090831307441e-06,
"loss": 0.0003,
"step": 901
},
{
"epoch": 11.8130081300813,
"grad_norm": 0.053205067060277646,
"learning_rate": 2.2032564593677773e-06,
"loss": 0.0004,
"step": 902
},
{
"epoch": 11.826016260162602,
"grad_norm": 0.04196901029871406,
"learning_rate": 2.1854851088425245e-06,
"loss": 0.0003,
"step": 903
},
{
"epoch": 11.839024390243903,
"grad_norm": 1.6008115113288401,
"learning_rate": 2.167776923378274e-06,
"loss": 0.003,
"step": 904
},
{
"epoch": 11.852032520325203,
"grad_norm": 0.08179428627670947,
"learning_rate": 2.150132046111054e-06,
"loss": 0.0006,
"step": 905
},
{
"epoch": 11.865040650406504,
"grad_norm": 0.045901122815486736,
"learning_rate": 2.132550619665168e-06,
"loss": 0.0004,
"step": 906
},
{
"epoch": 11.878048780487806,
"grad_norm": 0.04273976044021606,
"learning_rate": 2.115032786152047e-06,
"loss": 0.0003,
"step": 907
},
{
"epoch": 11.891056910569105,
"grad_norm": 0.03487722679277039,
"learning_rate": 2.0975786871690897e-06,
"loss": 0.0003,
"step": 908
},
{
"epoch": 11.904065040650407,
"grad_norm": 0.040993841061677555,
"learning_rate": 2.0801884637985337e-06,
"loss": 0.0003,
"step": 909
},
{
"epoch": 11.917073170731708,
"grad_norm": 0.048500935037488466,
"learning_rate": 2.0628622566063063e-06,
"loss": 0.0004,
"step": 910
},
{
"epoch": 11.930081300813008,
"grad_norm": 0.04576348639630146,
"learning_rate": 2.045600205640885e-06,
"loss": 0.0004,
"step": 911
},
{
"epoch": 11.94308943089431,
"grad_norm": 0.02565353345548741,
"learning_rate": 2.0284024504321754e-06,
"loss": 0.0002,
"step": 912
},
{
"epoch": 11.95609756097561,
"grad_norm": 0.04584969030642572,
"learning_rate": 2.011269129990376e-06,
"loss": 0.0004,
"step": 913
},
{
"epoch": 11.96910569105691,
"grad_norm": 0.19179968524650484,
"learning_rate": 1.9942003828048497e-06,
"loss": 0.0008,
"step": 914
},
{
"epoch": 11.982113821138212,
"grad_norm": 0.04706427979351206,
"learning_rate": 1.977196346843019e-06,
"loss": 0.0003,
"step": 915
},
{
"epoch": 11.995121951219513,
"grad_norm": 0.027584453796653467,
"learning_rate": 1.960257159549238e-06,
"loss": 0.0003,
"step": 916
},
{
"epoch": 12.008130081300813,
"grad_norm": 0.03077492388241451,
"learning_rate": 1.9433829578436937e-06,
"loss": 0.0003,
"step": 917
},
{
"epoch": 12.021138211382114,
"grad_norm": 0.04214761267907801,
"learning_rate": 1.926573878121286e-06,
"loss": 0.0004,
"step": 918
},
{
"epoch": 12.034146341463414,
"grad_norm": 0.04821007473769064,
"learning_rate": 1.9098300562505266e-06,
"loss": 0.0004,
"step": 919
},
{
"epoch": 12.047154471544715,
"grad_norm": 0.04150457101156769,
"learning_rate": 1.8931516275724527e-06,
"loss": 0.0004,
"step": 920
},
{
"epoch": 12.060162601626017,
"grad_norm": 0.02300453156783074,
"learning_rate": 1.8765387268995217e-06,
"loss": 0.0002,
"step": 921
},
{
"epoch": 12.073170731707316,
"grad_norm": 0.060825220544094435,
"learning_rate": 1.8599914885145287e-06,
"loss": 0.0004,
"step": 922
},
{
"epoch": 12.086178861788618,
"grad_norm": 0.03260378620826329,
"learning_rate": 1.8435100461695131e-06,
"loss": 0.0002,
"step": 923
},
{
"epoch": 12.099186991869919,
"grad_norm": 0.062443505010852336,
"learning_rate": 1.8270945330846868e-06,
"loss": 0.0003,
"step": 924
},
{
"epoch": 12.112195121951219,
"grad_norm": 0.04336228230253458,
"learning_rate": 1.8107450819473505e-06,
"loss": 0.0004,
"step": 925
},
{
"epoch": 12.12520325203252,
"grad_norm": 0.02709980396856279,
"learning_rate": 1.7944618249108204e-06,
"loss": 0.0002,
"step": 926
},
{
"epoch": 12.138211382113822,
"grad_norm": 0.03262436647450725,
"learning_rate": 1.7782448935933649e-06,
"loss": 0.0003,
"step": 927
},
{
"epoch": 12.151219512195121,
"grad_norm": 0.029557746567540205,
"learning_rate": 1.7620944190771427e-06,
"loss": 0.0003,
"step": 928
},
{
"epoch": 12.164227642276423,
"grad_norm": 0.012624259598801554,
"learning_rate": 1.7460105319071353e-06,
"loss": 0.0001,
"step": 929
},
{
"epoch": 12.177235772357724,
"grad_norm": 0.02914506084362277,
"learning_rate": 1.7299933620900945e-06,
"loss": 0.0003,
"step": 930
},
{
"epoch": 12.190243902439024,
"grad_norm": 0.039341397329373785,
"learning_rate": 1.7140430390935003e-06,
"loss": 0.0003,
"step": 931
},
{
"epoch": 12.203252032520325,
"grad_norm": 0.03480900877050601,
"learning_rate": 1.6981596918444953e-06,
"loss": 0.0003,
"step": 932
},
{
"epoch": 12.216260162601627,
"grad_norm": 0.01844993284555088,
"learning_rate": 1.6823434487288626e-06,
"loss": 0.0002,
"step": 933
},
{
"epoch": 12.229268292682926,
"grad_norm": 0.11082080083206294,
"learning_rate": 1.6665944375899778e-06,
"loss": 0.0004,
"step": 934
},
{
"epoch": 12.242276422764228,
"grad_norm": 0.02963595573823533,
"learning_rate": 1.6509127857277784e-06,
"loss": 0.0003,
"step": 935
},
{
"epoch": 12.255284552845529,
"grad_norm": 0.015490232540061447,
"learning_rate": 1.6352986198977327e-06,
"loss": 0.0002,
"step": 936
},
{
"epoch": 12.268292682926829,
"grad_norm": 0.036946837222196494,
"learning_rate": 1.6197520663098177e-06,
"loss": 0.0003,
"step": 937
},
{
"epoch": 12.28130081300813,
"grad_norm": 0.03689703560791502,
"learning_rate": 1.6042732506274939e-06,
"loss": 0.0003,
"step": 938
},
{
"epoch": 12.294308943089431,
"grad_norm": 0.01671065249876302,
"learning_rate": 1.5888622979666978e-06,
"loss": 0.0002,
"step": 939
},
{
"epoch": 12.307317073170731,
"grad_norm": 0.0296538420513258,
"learning_rate": 1.573519332894824e-06,
"loss": 0.0002,
"step": 940
},
{
"epoch": 12.320325203252033,
"grad_norm": 0.04370373558297985,
"learning_rate": 1.5582444794297225e-06,
"loss": 0.0002,
"step": 941
},
{
"epoch": 12.333333333333334,
"grad_norm": 0.03131885362602231,
"learning_rate": 1.5430378610386909e-06,
"loss": 0.0003,
"step": 942
},
{
"epoch": 12.346341463414634,
"grad_norm": 0.023644097457454247,
"learning_rate": 1.5278996006374836e-06,
"loss": 0.0002,
"step": 943
},
{
"epoch": 12.359349593495935,
"grad_norm": 0.028851288419968788,
"learning_rate": 1.5128298205893144e-06,
"loss": 0.0003,
"step": 944
},
{
"epoch": 12.372357723577236,
"grad_norm": 0.04344181321072099,
"learning_rate": 1.4978286427038602e-06,
"loss": 0.0004,
"step": 945
},
{
"epoch": 12.385365853658536,
"grad_norm": 0.031081718820231443,
"learning_rate": 1.4828961882362925e-06,
"loss": 0.0003,
"step": 946
},
{
"epoch": 12.398373983739837,
"grad_norm": 0.07692321943678358,
"learning_rate": 1.4680325778862837e-06,
"loss": 0.0007,
"step": 947
},
{
"epoch": 12.411382113821139,
"grad_norm": 0.03840200029078006,
"learning_rate": 1.453237931797038e-06,
"loss": 0.0003,
"step": 948
},
{
"epoch": 12.424390243902439,
"grad_norm": 0.02915687708744816,
"learning_rate": 1.438512369554318e-06,
"loss": 0.0002,
"step": 949
},
{
"epoch": 12.43739837398374,
"grad_norm": 0.027722748555523046,
"learning_rate": 1.4238560101854815e-06,
"loss": 0.0002,
"step": 950
},
{
"epoch": 12.450406504065041,
"grad_norm": 0.027711461854685253,
"learning_rate": 1.4092689721585052e-06,
"loss": 0.0002,
"step": 951
},
{
"epoch": 12.463414634146341,
"grad_norm": 0.026400138369060966,
"learning_rate": 1.3947513733810558e-06,
"loss": 0.0002,
"step": 952
},
{
"epoch": 12.476422764227642,
"grad_norm": 0.021296063658237975,
"learning_rate": 1.3803033311995072e-06,
"loss": 0.0002,
"step": 953
},
{
"epoch": 12.489430894308944,
"grad_norm": 0.04158455168487558,
"learning_rate": 1.3659249623980075e-06,
"loss": 0.0004,
"step": 954
},
{
"epoch": 12.502439024390243,
"grad_norm": 0.019533591657485316,
"learning_rate": 1.3516163831975337e-06,
"loss": 0.0002,
"step": 955
},
{
"epoch": 12.515447154471545,
"grad_norm": 0.023537015844786686,
"learning_rate": 1.3373777092549455e-06,
"loss": 0.0002,
"step": 956
},
{
"epoch": 12.528455284552846,
"grad_norm": 0.05740585106163819,
"learning_rate": 1.3232090556620602e-06,
"loss": 0.0005,
"step": 957
},
{
"epoch": 12.541463414634146,
"grad_norm": 0.03835919042582948,
"learning_rate": 1.3091105369447166e-06,
"loss": 0.0003,
"step": 958
},
{
"epoch": 12.554471544715447,
"grad_norm": 0.035260013086723926,
"learning_rate": 1.2950822670618491e-06,
"loss": 0.0003,
"step": 959
},
{
"epoch": 12.567479674796749,
"grad_norm": 0.12128345095801041,
"learning_rate": 1.2811243594045697e-06,
"loss": 0.0006,
"step": 960
},
{
"epoch": 12.580487804878048,
"grad_norm": 0.036532423074067316,
"learning_rate": 1.2672369267952477e-06,
"loss": 0.0002,
"step": 961
},
{
"epoch": 12.59349593495935,
"grad_norm": 0.03536480393863221,
"learning_rate": 1.2534200814865993e-06,
"loss": 0.0003,
"step": 962
},
{
"epoch": 12.60650406504065,
"grad_norm": 0.053240533731360416,
"learning_rate": 1.23967393516078e-06,
"loss": 0.0004,
"step": 963
},
{
"epoch": 12.61951219512195,
"grad_norm": 0.06059259370599514,
"learning_rate": 1.2259985989284851e-06,
"loss": 0.0004,
"step": 964
},
{
"epoch": 12.632520325203252,
"grad_norm": 0.039793466812882275,
"learning_rate": 1.2123941833280472e-06,
"loss": 0.0003,
"step": 965
},
{
"epoch": 12.645528455284552,
"grad_norm": 0.033625877863249394,
"learning_rate": 1.198860798324546e-06,
"loss": 0.0003,
"step": 966
},
{
"epoch": 12.658536585365853,
"grad_norm": 0.03217111320212353,
"learning_rate": 1.185398553308915e-06,
"loss": 0.0003,
"step": 967
},
{
"epoch": 12.671544715447155,
"grad_norm": 0.028764585766712494,
"learning_rate": 1.1720075570970602e-06,
"loss": 0.0002,
"step": 968
},
{
"epoch": 12.684552845528454,
"grad_norm": 0.029641836079438693,
"learning_rate": 1.1586879179289833e-06,
"loss": 0.0002,
"step": 969
},
{
"epoch": 12.697560975609756,
"grad_norm": 0.03960907206585507,
"learning_rate": 1.1454397434679022e-06,
"loss": 0.0003,
"step": 970
},
{
"epoch": 12.710569105691057,
"grad_norm": 0.023863283321871376,
"learning_rate": 1.132263140799381e-06,
"loss": 0.0002,
"step": 971
},
{
"epoch": 12.723577235772357,
"grad_norm": 0.05328446969958965,
"learning_rate": 1.1191582164304705e-06,
"loss": 0.0003,
"step": 972
},
{
"epoch": 12.736585365853658,
"grad_norm": 0.03669637650871495,
"learning_rate": 1.1061250762888365e-06,
"loss": 0.0003,
"step": 973
},
{
"epoch": 12.74959349593496,
"grad_norm": 0.032997154354977465,
"learning_rate": 1.0931638257219179e-06,
"loss": 0.0003,
"step": 974
},
{
"epoch": 12.76260162601626,
"grad_norm": 0.04054662663680875,
"learning_rate": 1.080274569496057e-06,
"loss": 0.0003,
"step": 975
},
{
"epoch": 12.77560975609756,
"grad_norm": 0.035856685677166364,
"learning_rate": 1.0674574117956692e-06,
"loss": 0.0003,
"step": 976
},
{
"epoch": 12.788617886178862,
"grad_norm": 0.04172495614914336,
"learning_rate": 1.0547124562223943e-06,
"loss": 0.0003,
"step": 977
},
{
"epoch": 12.801626016260162,
"grad_norm": 0.019580427416265797,
"learning_rate": 1.0420398057942581e-06,
"loss": 0.0002,
"step": 978
},
{
"epoch": 12.814634146341463,
"grad_norm": 0.03740389694894794,
"learning_rate": 1.0294395629448394e-06,
"loss": 0.0003,
"step": 979
},
{
"epoch": 12.827642276422765,
"grad_norm": 0.031803134224532056,
"learning_rate": 1.0169118295224488e-06,
"loss": 0.0003,
"step": 980
},
{
"epoch": 12.840650406504064,
"grad_norm": 0.03032456276841443,
"learning_rate": 1.0044567067892896e-06,
"loss": 0.0003,
"step": 981
},
{
"epoch": 12.853658536585366,
"grad_norm": 0.02199024409508468,
"learning_rate": 9.920742954206607e-07,
"loss": 0.0002,
"step": 982
},
{
"epoch": 12.866666666666667,
"grad_norm": 0.031972722797786526,
"learning_rate": 9.79764695504125e-07,
"loss": 0.0003,
"step": 983
},
{
"epoch": 12.879674796747967,
"grad_norm": 0.027213042256643855,
"learning_rate": 9.675280065387117e-07,
"loss": 0.0002,
"step": 984
},
{
"epoch": 12.892682926829268,
"grad_norm": 0.08563731214364351,
"learning_rate": 9.55364327434105e-07,
"loss": 0.0004,
"step": 985
},
{
"epoch": 12.90569105691057,
"grad_norm": 0.03366232448582257,
"learning_rate": 9.43273756509847e-07,
"loss": 0.0003,
"step": 986
},
{
"epoch": 12.91869918699187,
"grad_norm": 0.014805009601497832,
"learning_rate": 9.312563914945461e-07,
"loss": 0.0001,
"step": 987
},
{
"epoch": 12.93170731707317,
"grad_norm": 0.050176526307846074,
"learning_rate": 9.193123295250794e-07,
"loss": 0.0003,
"step": 988
},
{
"epoch": 12.944715447154472,
"grad_norm": 0.017531712275581913,
"learning_rate": 9.074416671458175e-07,
"loss": 0.0002,
"step": 989
},
{
"epoch": 12.957723577235772,
"grad_norm": 0.04029991292115628,
"learning_rate": 8.956445003078351e-07,
"loss": 0.0003,
"step": 990
},
{
"epoch": 12.970731707317073,
"grad_norm": 0.03487711795800321,
"learning_rate": 8.839209243681435e-07,
"loss": 0.0003,
"step": 991
},
{
"epoch": 12.983739837398375,
"grad_norm": 0.06273158241583285,
"learning_rate": 8.722710340889074e-07,
"loss": 0.0005,
"step": 992
},
{
"epoch": 12.996747967479674,
"grad_norm": 0.01994697874973447,
"learning_rate": 8.606949236366969e-07,
"loss": 0.0002,
"step": 993
},
{
"epoch": 13.009756097560976,
"grad_norm": 0.02549053991207511,
"learning_rate": 8.491926865817113e-07,
"loss": 0.0002,
"step": 994
},
{
"epoch": 13.022764227642277,
"grad_norm": 0.0307980889762381,
"learning_rate": 8.377644158970277e-07,
"loss": 0.0003,
"step": 995
},
{
"epoch": 13.035772357723577,
"grad_norm": 0.023649380232462637,
"learning_rate": 8.264102039578526e-07,
"loss": 0.0002,
"step": 996
},
{
"epoch": 13.048780487804878,
"grad_norm": 0.020221083840776814,
"learning_rate": 8.151301425407699e-07,
"loss": 0.0002,
"step": 997
},
{
"epoch": 13.06178861788618,
"grad_norm": 0.03926025654030983,
"learning_rate": 8.039243228230043e-07,
"loss": 0.0003,
"step": 998
},
{
"epoch": 13.07479674796748,
"grad_norm": 0.020449515371978706,
"learning_rate": 7.927928353816782e-07,
"loss": 0.0002,
"step": 999
},
{
"epoch": 13.08780487804878,
"grad_norm": 0.02932084253359567,
"learning_rate": 7.81735770193085e-07,
"loss": 0.0003,
"step": 1000
},
{
"epoch": 13.100813008130082,
"grad_norm": 0.024112902281580206,
"learning_rate": 7.707532166319598e-07,
"loss": 0.0002,
"step": 1001
},
{
"epoch": 13.113821138211382,
"grad_norm": 0.025977086642222053,
"learning_rate": 7.598452634707576e-07,
"loss": 0.0003,
"step": 1002
},
{
"epoch": 13.126829268292683,
"grad_norm": 0.03171766941376975,
"learning_rate": 7.490119988789346e-07,
"loss": 0.0002,
"step": 1003
},
{
"epoch": 13.139837398373984,
"grad_norm": 0.024337316023638367,
"learning_rate": 7.382535104222366e-07,
"loss": 0.0002,
"step": 1004
},
{
"epoch": 13.152845528455284,
"grad_norm": 0.029670754329652924,
"learning_rate": 7.275698850619861e-07,
"loss": 0.0003,
"step": 1005
},
{
"epoch": 13.165853658536586,
"grad_norm": 0.03834238404585399,
"learning_rate": 7.169612091543909e-07,
"loss": 0.0003,
"step": 1006
},
{
"epoch": 13.178861788617887,
"grad_norm": 0.034469653100517236,
"learning_rate": 7.064275684498357e-07,
"loss": 0.0003,
"step": 1007
},
{
"epoch": 13.191869918699187,
"grad_norm": 0.031749002558175,
"learning_rate": 6.959690480921922e-07,
"loss": 0.0003,
"step": 1008
},
{
"epoch": 13.204878048780488,
"grad_norm": 0.025417220142589323,
"learning_rate": 6.855857326181315e-07,
"loss": 0.0002,
"step": 1009
},
{
"epoch": 13.21788617886179,
"grad_norm": 0.06077297341714978,
"learning_rate": 6.752777059564431e-07,
"loss": 0.0005,
"step": 1010
},
{
"epoch": 13.230894308943089,
"grad_norm": 0.05072001838936607,
"learning_rate": 6.650450514273476e-07,
"loss": 0.0004,
"step": 1011
},
{
"epoch": 13.24390243902439,
"grad_norm": 0.03387119754903101,
"learning_rate": 6.548878517418345e-07,
"loss": 0.0003,
"step": 1012
},
{
"epoch": 13.256910569105692,
"grad_norm": 0.10756327605409695,
"learning_rate": 6.448061890009872e-07,
"loss": 0.0006,
"step": 1013
},
{
"epoch": 13.269918699186992,
"grad_norm": 0.029678540021646233,
"learning_rate": 6.348001446953201e-07,
"loss": 0.0002,
"step": 1014
},
{
"epoch": 13.282926829268293,
"grad_norm": 0.04013972159611284,
"learning_rate": 6.248697997041219e-07,
"loss": 0.0003,
"step": 1015
},
{
"epoch": 13.295934959349594,
"grad_norm": 0.019077151556142806,
"learning_rate": 6.15015234294799e-07,
"loss": 0.0002,
"step": 1016
},
{
"epoch": 13.308943089430894,
"grad_norm": 0.016650919618137604,
"learning_rate": 6.052365281222306e-07,
"loss": 0.0001,
"step": 1017
},
{
"epoch": 13.321951219512195,
"grad_norm": 0.020689154219598792,
"learning_rate": 5.955337602281164e-07,
"loss": 0.0001,
"step": 1018
},
{
"epoch": 13.334959349593497,
"grad_norm": 0.024136957498527598,
"learning_rate": 5.859070090403507e-07,
"loss": 0.0002,
"step": 1019
},
{
"epoch": 13.347967479674796,
"grad_norm": 0.029919824327203366,
"learning_rate": 5.763563523723769e-07,
"loss": 0.0002,
"step": 1020
},
{
"epoch": 13.360975609756098,
"grad_norm": 0.034164027668019015,
"learning_rate": 5.668818674225684e-07,
"loss": 0.0003,
"step": 1021
},
{
"epoch": 13.373983739837398,
"grad_norm": 0.02468655474347108,
"learning_rate": 5.574836307735942e-07,
"loss": 0.0002,
"step": 1022
},
{
"epoch": 13.386991869918699,
"grad_norm": 0.043140945132677466,
"learning_rate": 5.481617183918053e-07,
"loss": 0.0004,
"step": 1023
},
{
"epoch": 13.4,
"grad_norm": 0.03878222674386918,
"learning_rate": 5.389162056266217e-07,
"loss": 0.0003,
"step": 1024
},
{
"epoch": 13.4130081300813,
"grad_norm": 0.025044038832899608,
"learning_rate": 5.29747167209923e-07,
"loss": 0.0003,
"step": 1025
},
{
"epoch": 13.426016260162601,
"grad_norm": 0.019828237699485142,
"learning_rate": 5.206546772554421e-07,
"loss": 0.0001,
"step": 1026
},
{
"epoch": 13.439024390243903,
"grad_norm": 0.021879669223526684,
"learning_rate": 5.11638809258167e-07,
"loss": 0.0002,
"step": 1027
},
{
"epoch": 13.452032520325202,
"grad_norm": 0.03618577826106415,
"learning_rate": 5.026996360937497e-07,
"loss": 0.0003,
"step": 1028
},
{
"epoch": 13.465040650406504,
"grad_norm": 0.060859060269609425,
"learning_rate": 4.938372300179095e-07,
"loss": 0.0004,
"step": 1029
},
{
"epoch": 13.478048780487805,
"grad_norm": 0.022248539562761523,
"learning_rate": 4.850516626658585e-07,
"loss": 0.0002,
"step": 1030
},
{
"epoch": 13.491056910569105,
"grad_norm": 0.017508560411216564,
"learning_rate": 4.7634300505171706e-07,
"loss": 0.0002,
"step": 1031
},
{
"epoch": 13.504065040650406,
"grad_norm": 0.030861709855513904,
"learning_rate": 4.6771132756794014e-07,
"loss": 0.0003,
"step": 1032
},
{
"epoch": 13.517073170731708,
"grad_norm": 0.031550978453916724,
"learning_rate": 4.591566999847485e-07,
"loss": 0.0003,
"step": 1033
},
{
"epoch": 13.530081300813007,
"grad_norm": 0.019490216675892496,
"learning_rate": 4.5067919144956786e-07,
"loss": 0.0002,
"step": 1034
},
{
"epoch": 13.543089430894309,
"grad_norm": 0.018780461745252785,
"learning_rate": 4.4227887048646335e-07,
"loss": 0.0002,
"step": 1035
},
{
"epoch": 13.55609756097561,
"grad_norm": 0.023017234749854912,
"learning_rate": 4.3395580499559276e-07,
"loss": 0.0002,
"step": 1036
},
{
"epoch": 13.56910569105691,
"grad_norm": 0.04625964724403267,
"learning_rate": 4.257100622526522e-07,
"loss": 0.0003,
"step": 1037
},
{
"epoch": 13.582113821138211,
"grad_norm": 0.021183619605396154,
"learning_rate": 4.1754170890833777e-07,
"loss": 0.0002,
"step": 1038
},
{
"epoch": 13.595121951219513,
"grad_norm": 0.019814070943899136,
"learning_rate": 4.094508109878015e-07,
"loss": 0.0002,
"step": 1039
},
{
"epoch": 13.608130081300812,
"grad_norm": 0.10482935924047573,
"learning_rate": 4.014374338901206e-07,
"loss": 0.0005,
"step": 1040
},
{
"epoch": 13.621138211382114,
"grad_norm": 0.029873361045492864,
"learning_rate": 3.935016423877669e-07,
"loss": 0.0002,
"step": 1041
},
{
"epoch": 13.634146341463415,
"grad_norm": 0.026330571763574936,
"learning_rate": 3.8564350062608614e-07,
"loss": 0.0003,
"step": 1042
},
{
"epoch": 13.647154471544715,
"grad_norm": 0.04793123621711193,
"learning_rate": 3.7786307212277605e-07,
"loss": 0.0004,
"step": 1043
},
{
"epoch": 13.660162601626016,
"grad_norm": 0.029028943314858512,
"learning_rate": 3.7016041976737803e-07,
"loss": 0.0002,
"step": 1044
},
{
"epoch": 13.673170731707318,
"grad_norm": 0.015808414804027004,
"learning_rate": 3.6253560582076075e-07,
"loss": 0.0001,
"step": 1045
},
{
"epoch": 13.686178861788617,
"grad_norm": 0.04926046336983506,
"learning_rate": 3.549886919146273e-07,
"loss": 0.0004,
"step": 1046
},
{
"epoch": 13.699186991869919,
"grad_norm": 0.019757178400023927,
"learning_rate": 3.4751973905100656e-07,
"loss": 0.0002,
"step": 1047
},
{
"epoch": 13.71219512195122,
"grad_norm": 0.02129644550912768,
"learning_rate": 3.4012880760176723e-07,
"loss": 0.0002,
"step": 1048
},
{
"epoch": 13.72520325203252,
"grad_norm": 0.03242801353349851,
"learning_rate": 3.328159573081258e-07,
"loss": 0.0003,
"step": 1049
},
{
"epoch": 13.738211382113821,
"grad_norm": 0.02075180131439445,
"learning_rate": 3.255812472801689e-07,
"loss": 0.0002,
"step": 1050
},
{
"epoch": 13.751219512195123,
"grad_norm": 0.036189659636635424,
"learning_rate": 3.184247359963677e-07,
"loss": 0.0003,
"step": 1051
},
{
"epoch": 13.764227642276422,
"grad_norm": 0.038577208451691616,
"learning_rate": 3.1134648130311305e-07,
"loss": 0.0003,
"step": 1052
},
{
"epoch": 13.777235772357724,
"grad_norm": 0.019578026825799153,
"learning_rate": 3.0434654041424315e-07,
"loss": 0.0002,
"step": 1053
},
{
"epoch": 13.790243902439025,
"grad_norm": 0.038217555100736766,
"learning_rate": 2.974249699105802e-07,
"loss": 0.0003,
"step": 1054
},
{
"epoch": 13.803252032520325,
"grad_norm": 0.037397444857588134,
"learning_rate": 2.905818257394799e-07,
"loss": 0.0002,
"step": 1055
},
{
"epoch": 13.816260162601626,
"grad_norm": 0.016900876285998463,
"learning_rate": 2.8381716321436936e-07,
"loss": 0.0001,
"step": 1056
},
{
"epoch": 13.829268292682928,
"grad_norm": 0.0203693368004702,
"learning_rate": 2.771310370143099e-07,
"loss": 0.0002,
"step": 1057
},
{
"epoch": 13.842276422764227,
"grad_norm": 0.016405347132121117,
"learning_rate": 2.705235011835472e-07,
"loss": 0.0002,
"step": 1058
},
{
"epoch": 13.855284552845529,
"grad_norm": 0.025549581414522154,
"learning_rate": 2.639946091310786e-07,
"loss": 0.0002,
"step": 1059
},
{
"epoch": 13.86829268292683,
"grad_norm": 0.04227039234601936,
"learning_rate": 2.5754441363021854e-07,
"loss": 0.0004,
"step": 1060
},
{
"epoch": 13.88130081300813,
"grad_norm": 0.02941832608756205,
"learning_rate": 2.511729668181773e-07,
"loss": 0.0003,
"step": 1061
},
{
"epoch": 13.894308943089431,
"grad_norm": 0.023422564676835966,
"learning_rate": 2.44880320195634e-07,
"loss": 0.0002,
"step": 1062
},
{
"epoch": 13.907317073170733,
"grad_norm": 0.027871785358745793,
"learning_rate": 2.38666524626322e-07,
"loss": 0.0002,
"step": 1063
},
{
"epoch": 13.920325203252032,
"grad_norm": 0.02968460669625177,
"learning_rate": 2.3253163033662097e-07,
"loss": 0.0002,
"step": 1064
},
{
"epoch": 13.933333333333334,
"grad_norm": 0.0565129740043272,
"learning_rate": 2.264756869151441e-07,
"loss": 0.0005,
"step": 1065
},
{
"epoch": 13.946341463414633,
"grad_norm": 0.018101006672774193,
"learning_rate": 2.2049874331234289e-07,
"loss": 0.0002,
"step": 1066
},
{
"epoch": 13.959349593495935,
"grad_norm": 0.045688843343597706,
"learning_rate": 2.1460084784011293e-07,
"loss": 0.0003,
"step": 1067
},
{
"epoch": 13.972357723577236,
"grad_norm": 0.05940289947656419,
"learning_rate": 2.0878204817139536e-07,
"loss": 0.0003,
"step": 1068
},
{
"epoch": 13.985365853658536,
"grad_norm": 0.021621769556400938,
"learning_rate": 2.0304239133980052e-07,
"loss": 0.0002,
"step": 1069
},
{
"epoch": 13.998373983739837,
"grad_norm": 0.02309363700179012,
"learning_rate": 1.973819237392205e-07,
"loss": 0.0002,
"step": 1070
},
{
"epoch": 14.011382113821139,
"grad_norm": 0.027500933714689976,
"learning_rate": 1.9180069112346157e-07,
"loss": 0.0003,
"step": 1071
},
{
"epoch": 14.024390243902438,
"grad_norm": 0.030244307231598946,
"learning_rate": 1.8629873860586567e-07,
"loss": 0.0002,
"step": 1072
},
{
"epoch": 14.03739837398374,
"grad_norm": 0.03019993808409305,
"learning_rate": 1.8087611065895295e-07,
"loss": 0.0002,
"step": 1073
},
{
"epoch": 14.050406504065041,
"grad_norm": 0.031140680127246946,
"learning_rate": 1.755328511140597e-07,
"loss": 0.0003,
"step": 1074
},
{
"epoch": 14.06341463414634,
"grad_norm": 0.04172006896705525,
"learning_rate": 1.7026900316098217e-07,
"loss": 0.0003,
"step": 1075
},
{
"epoch": 14.076422764227642,
"grad_norm": 0.025305355483100058,
"learning_rate": 1.6508460934763104e-07,
"loss": 0.0002,
"step": 1076
},
{
"epoch": 14.089430894308943,
"grad_norm": 0.06505527132645876,
"learning_rate": 1.599797115796864e-07,
"loss": 0.0003,
"step": 1077
},
{
"epoch": 14.102439024390243,
"grad_norm": 0.021875170909544356,
"learning_rate": 1.549543511202556e-07,
"loss": 0.0002,
"step": 1078
},
{
"epoch": 14.115447154471545,
"grad_norm": 0.028820932133379662,
"learning_rate": 1.500085685895436e-07,
"loss": 0.0002,
"step": 1079
},
{
"epoch": 14.128455284552846,
"grad_norm": 0.06547214698811654,
"learning_rate": 1.4514240396452438e-07,
"loss": 0.0005,
"step": 1080
},
{
"epoch": 14.141463414634146,
"grad_norm": 0.022084226183417356,
"learning_rate": 1.4035589657861782e-07,
"loss": 0.0002,
"step": 1081
},
{
"epoch": 14.154471544715447,
"grad_norm": 0.030501794381445683,
"learning_rate": 1.3564908512136877e-07,
"loss": 0.0003,
"step": 1082
},
{
"epoch": 14.167479674796748,
"grad_norm": 0.05544690283648963,
"learning_rate": 1.3102200763813744e-07,
"loss": 0.0004,
"step": 1083
},
{
"epoch": 14.180487804878048,
"grad_norm": 0.024801402394138795,
"learning_rate": 1.2647470152979068e-07,
"loss": 0.0002,
"step": 1084
},
{
"epoch": 14.19349593495935,
"grad_norm": 0.02667042819572207,
"learning_rate": 1.2200720355239893e-07,
"loss": 0.0002,
"step": 1085
},
{
"epoch": 14.20650406504065,
"grad_norm": 0.0285111878316447,
"learning_rate": 1.1761954981694301e-07,
"loss": 0.0003,
"step": 1086
},
{
"epoch": 14.21951219512195,
"grad_norm": 0.019804417639668098,
"learning_rate": 1.1331177578901564e-07,
"loss": 0.0002,
"step": 1087
},
{
"epoch": 14.232520325203252,
"grad_norm": 0.01747656276935746,
"learning_rate": 1.0908391628854042e-07,
"loss": 0.0001,
"step": 1088
},
{
"epoch": 14.245528455284553,
"grad_norm": 0.02058245422053517,
"learning_rate": 1.0493600548948879e-07,
"loss": 0.0002,
"step": 1089
},
{
"epoch": 14.258536585365853,
"grad_norm": 0.03829122889093893,
"learning_rate": 1.0086807691960243e-07,
"loss": 0.0003,
"step": 1090
},
{
"epoch": 14.271544715447154,
"grad_norm": 0.03158787600436918,
"learning_rate": 9.688016346012463e-08,
"loss": 0.0003,
"step": 1091
},
{
"epoch": 14.284552845528456,
"grad_norm": 0.01894562834232925,
"learning_rate": 9.297229734552937e-08,
"loss": 0.0002,
"step": 1092
},
{
"epoch": 14.297560975609755,
"grad_norm": 0.039139900460036486,
"learning_rate": 8.914451016326931e-08,
"loss": 0.0003,
"step": 1093
},
{
"epoch": 14.310569105691057,
"grad_norm": 0.02194935008727504,
"learning_rate": 8.539683285351152e-08,
"loss": 0.0002,
"step": 1094
},
{
"epoch": 14.323577235772358,
"grad_norm": 0.0316393239011527,
"learning_rate": 8.172929570889553e-08,
"loss": 0.0003,
"step": 1095
},
{
"epoch": 14.336585365853658,
"grad_norm": 0.04169887849957978,
"learning_rate": 7.814192837428126e-08,
"loss": 0.0004,
"step": 1096
},
{
"epoch": 14.34959349593496,
"grad_norm": 0.02965671174590213,
"learning_rate": 7.463475984651248e-08,
"loss": 0.0003,
"step": 1097
},
{
"epoch": 14.36260162601626,
"grad_norm": 0.04111474695069758,
"learning_rate": 7.12078184741849e-08,
"loss": 0.0003,
"step": 1098
},
{
"epoch": 14.37560975609756,
"grad_norm": 0.04266172001967759,
"learning_rate": 6.786113195741406e-08,
"loss": 0.0004,
"step": 1099
},
{
"epoch": 14.388617886178862,
"grad_norm": 0.019013161463080158,
"learning_rate": 6.459472734760997e-08,
"loss": 0.0002,
"step": 1100
},
{
"epoch": 14.401626016260163,
"grad_norm": 0.08610350627610622,
"learning_rate": 6.140863104726391e-08,
"loss": 0.0005,
"step": 1101
},
{
"epoch": 14.414634146341463,
"grad_norm": 0.029853636365563475,
"learning_rate": 5.830286880972758e-08,
"loss": 0.0002,
"step": 1102
},
{
"epoch": 14.427642276422764,
"grad_norm": 0.037999272268641124,
"learning_rate": 5.5277465739012045e-08,
"loss": 0.0003,
"step": 1103
},
{
"epoch": 14.440650406504066,
"grad_norm": 0.029356112058825358,
"learning_rate": 5.233244628958134e-08,
"loss": 0.0002,
"step": 1104
},
{
"epoch": 14.453658536585365,
"grad_norm": 0.020414043886620863,
"learning_rate": 4.9467834266154756e-08,
"loss": 0.0002,
"step": 1105
},
{
"epoch": 14.466666666666667,
"grad_norm": 0.05295425203318339,
"learning_rate": 4.6683652823513725e-08,
"loss": 0.0005,
"step": 1106
},
{
"epoch": 14.479674796747968,
"grad_norm": 0.03061975529593269,
"learning_rate": 4.39799244663186e-08,
"loss": 0.0003,
"step": 1107
},
{
"epoch": 14.492682926829268,
"grad_norm": 0.03505927802669296,
"learning_rate": 4.135667104892105e-08,
"loss": 0.0003,
"step": 1108
},
{
"epoch": 14.50569105691057,
"grad_norm": 0.01569899988684874,
"learning_rate": 3.881391377519084e-08,
"loss": 0.0001,
"step": 1109
},
{
"epoch": 14.51869918699187,
"grad_norm": 0.04463930655495351,
"learning_rate": 3.635167319834709e-08,
"loss": 0.0003,
"step": 1110
},
{
"epoch": 14.53170731707317,
"grad_norm": 0.01592995076247383,
"learning_rate": 3.3969969220785106e-08,
"loss": 0.0002,
"step": 1111
},
{
"epoch": 14.544715447154472,
"grad_norm": 0.026599290272323235,
"learning_rate": 3.166882109392089e-08,
"loss": 0.0002,
"step": 1112
},
{
"epoch": 14.557723577235773,
"grad_norm": 0.029761775138282494,
"learning_rate": 2.944824741803576e-08,
"loss": 0.0003,
"step": 1113
},
{
"epoch": 14.570731707317073,
"grad_norm": 0.03744078298569363,
"learning_rate": 2.7308266142119788e-08,
"loss": 0.0003,
"step": 1114
},
{
"epoch": 14.583739837398374,
"grad_norm": 0.013773818583414988,
"learning_rate": 2.524889456373525e-08,
"loss": 0.0001,
"step": 1115
},
{
"epoch": 14.596747967479676,
"grad_norm": 0.03608666336236024,
"learning_rate": 2.327014932887228e-08,
"loss": 0.0003,
"step": 1116
},
{
"epoch": 14.609756097560975,
"grad_norm": 0.02230891276147283,
"learning_rate": 2.1372046431812343e-08,
"loss": 0.0002,
"step": 1117
},
{
"epoch": 14.622764227642277,
"grad_norm": 0.02345370178511967,
"learning_rate": 1.9554601215003856e-08,
"loss": 0.0002,
"step": 1118
},
{
"epoch": 14.635772357723578,
"grad_norm": 0.01912942327167438,
"learning_rate": 1.7817828368935642e-08,
"loss": 0.0002,
"step": 1119
},
{
"epoch": 14.648780487804878,
"grad_norm": 0.06174519769536573,
"learning_rate": 1.6161741932017026e-08,
"loss": 0.0004,
"step": 1120
},
{
"epoch": 14.66178861788618,
"grad_norm": 0.018949965702635463,
"learning_rate": 1.4586355290464593e-08,
"loss": 0.0002,
"step": 1121
},
{
"epoch": 14.67479674796748,
"grad_norm": 0.02458718649975269,
"learning_rate": 1.3091681178198922e-08,
"loss": 0.0002,
"step": 1122
},
{
"epoch": 14.68780487804878,
"grad_norm": 0.02215083696617675,
"learning_rate": 1.1677731676733584e-08,
"loss": 0.0002,
"step": 1123
},
{
"epoch": 14.700813008130082,
"grad_norm": 0.021477017570415178,
"learning_rate": 1.0344518215085198e-08,
"loss": 0.0002,
"step": 1124
},
{
"epoch": 14.713821138211383,
"grad_norm": 0.04377200578845525,
"learning_rate": 9.092051569674632e-09,
"loss": 0.0003,
"step": 1125
},
{
"epoch": 14.726829268292683,
"grad_norm": 0.028085914628265037,
"learning_rate": 7.92034186424262e-09,
"loss": 0.0002,
"step": 1126
},
{
"epoch": 14.739837398373984,
"grad_norm": 0.019067828968739185,
"learning_rate": 6.82939856977094e-09,
"loss": 0.0002,
"step": 1127
},
{
"epoch": 14.752845528455284,
"grad_norm": 0.015128591405197895,
"learning_rate": 5.819230504401363e-09,
"loss": 0.0001,
"step": 1128
},
{
"epoch": 14.765853658536585,
"grad_norm": 0.02247482737283179,
"learning_rate": 4.889845833364604e-09,
"loss": 0.0002,
"step": 1129
},
{
"epoch": 14.778861788617887,
"grad_norm": 0.04000762766042668,
"learning_rate": 4.041252068918145e-09,
"loss": 0.0003,
"step": 1130
},
{
"epoch": 14.791869918699186,
"grad_norm": 0.02929267821314836,
"learning_rate": 3.273456070281844e-09,
"loss": 0.0003,
"step": 1131
},
{
"epoch": 14.804878048780488,
"grad_norm": 0.016502659240903488,
"learning_rate": 2.5864640435835362e-09,
"loss": 0.0001,
"step": 1132
},
{
"epoch": 14.817886178861789,
"grad_norm": 0.020456975306716727,
"learning_rate": 1.9802815418101805e-09,
"loss": 0.0002,
"step": 1133
},
{
"epoch": 14.830894308943089,
"grad_norm": 0.037623013210518384,
"learning_rate": 1.4549134647601215e-09,
"loss": 0.0003,
"step": 1134
},
{
"epoch": 14.84390243902439,
"grad_norm": 0.028791151142816777,
"learning_rate": 1.0103640590064524e-09,
"loss": 0.0003,
"step": 1135
},
{
"epoch": 14.856910569105692,
"grad_norm": 0.028158572046181538,
"learning_rate": 6.466369178614873e-10,
"loss": 0.0002,
"step": 1136
},
{
"epoch": 14.869918699186991,
"grad_norm": 0.02706991421217001,
"learning_rate": 3.637349813467861e-10,
"loss": 0.0002,
"step": 1137
},
{
"epoch": 14.882926829268293,
"grad_norm": 0.051082395177727204,
"learning_rate": 1.616605361720591e-10,
"loss": 0.0002,
"step": 1138
},
{
"epoch": 14.895934959349594,
"grad_norm": 0.028377159346977227,
"learning_rate": 4.041521571296336e-11,
"loss": 0.0002,
"step": 1139
},
{
"epoch": 14.908943089430894,
"grad_norm": 0.03112857719903251,
"learning_rate": 0.0,
"loss": 0.0003,
"step": 1140
},
{
"epoch": 14.908943089430894,
"step": 1140,
"total_flos": 650452104151040.0,
"train_loss": 0.0012602176121027586,
"train_runtime": 17614.0367,
"train_samples_per_second": 8.375,
"train_steps_per_second": 0.065
}
],
"logging_steps": 1.0,
"max_steps": 1140,
"num_input_tokens_seen": 0,
"num_train_epochs": 15,
"save_steps": 150,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 650452104151040.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}