InjecAgent-vicuna-7b-v1.5-15 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 15.0,
"eval_steps": 500,
"global_step": 3525,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0425531914893617,
"grad_norm": 0.4270687699317932,
"learning_rate": 0.00019999602855426865,
"loss": 1.013,
"step": 10
},
{
"epoch": 0.0851063829787234,
"grad_norm": 0.4152718782424927,
"learning_rate": 0.00019998411453252217,
"loss": 0.8289,
"step": 20
},
{
"epoch": 0.1276595744680851,
"grad_norm": 0.7277560234069824,
"learning_rate": 0.0001999642588810784,
"loss": 0.5959,
"step": 30
},
{
"epoch": 0.1702127659574468,
"grad_norm": 0.5505673885345459,
"learning_rate": 0.00019993646317705016,
"loss": 0.459,
"step": 40
},
{
"epoch": 0.2127659574468085,
"grad_norm": 0.528052031993866,
"learning_rate": 0.00019990072962822007,
"loss": 0.3775,
"step": 50
},
{
"epoch": 0.2553191489361702,
"grad_norm": 0.6307681202888489,
"learning_rate": 0.00019985706107286514,
"loss": 0.3285,
"step": 60
},
{
"epoch": 0.2978723404255319,
"grad_norm": 0.6954013109207153,
"learning_rate": 0.00019980546097953132,
"loss": 0.2855,
"step": 70
},
{
"epoch": 0.3404255319148936,
"grad_norm": 0.6790465116500854,
"learning_rate": 0.000199745933446758,
"loss": 0.2782,
"step": 80
},
{
"epoch": 0.3829787234042553,
"grad_norm": 1.324937105178833,
"learning_rate": 0.0001996784832027525,
"loss": 0.2635,
"step": 90
},
{
"epoch": 0.425531914893617,
"grad_norm": 0.8779314756393433,
"learning_rate": 0.00019960311560501454,
"loss": 0.1861,
"step": 100
},
{
"epoch": 0.46808510638297873,
"grad_norm": 0.693745493888855,
"learning_rate": 0.00019951983663991056,
"loss": 0.2001,
"step": 110
},
{
"epoch": 0.5106382978723404,
"grad_norm": 1.0649502277374268,
"learning_rate": 0.00019942865292219838,
"loss": 0.1378,
"step": 120
},
{
"epoch": 0.5531914893617021,
"grad_norm": 0.6962260007858276,
"learning_rate": 0.0001993295716945017,
"loss": 0.1579,
"step": 130
},
{
"epoch": 0.5957446808510638,
"grad_norm": 0.7934479713439941,
"learning_rate": 0.00019922260082673497,
"loss": 0.092,
"step": 140
},
{
"epoch": 0.6382978723404256,
"grad_norm": 1.1331907510757446,
"learning_rate": 0.000199107748815478,
"loss": 0.1208,
"step": 150
},
{
"epoch": 0.6808510638297872,
"grad_norm": 1.3689247369766235,
"learning_rate": 0.00019898502478330152,
"loss": 0.0874,
"step": 160
},
{
"epoch": 0.723404255319149,
"grad_norm": 0.5304535031318665,
"learning_rate": 0.00019885443847804211,
"loss": 0.0881,
"step": 170
},
{
"epoch": 0.7659574468085106,
"grad_norm": 0.6805845499038696,
"learning_rate": 0.0001987160002720283,
"loss": 0.0584,
"step": 180
},
{
"epoch": 0.8085106382978723,
"grad_norm": 0.2527499198913574,
"learning_rate": 0.00019856972116125653,
"loss": 0.08,
"step": 190
},
{
"epoch": 0.851063829787234,
"grad_norm": 0.799462616443634,
"learning_rate": 0.0001984156127645178,
"loss": 0.0556,
"step": 200
},
{
"epoch": 0.8936170212765957,
"grad_norm": 0.936975359916687,
"learning_rate": 0.0001982536873224748,
"loss": 0.0945,
"step": 210
},
{
"epoch": 0.9361702127659575,
"grad_norm": 0.8067993521690369,
"learning_rate": 0.00019808395769668963,
"loss": 0.0495,
"step": 220
},
{
"epoch": 0.9787234042553191,
"grad_norm": 0.45767834782600403,
"learning_rate": 0.00019790643736860227,
"loss": 0.0617,
"step": 230
},
{
"epoch": 1.0212765957446808,
"grad_norm": 0.9198794364929199,
"learning_rate": 0.00019772114043845965,
"loss": 0.0467,
"step": 240
},
{
"epoch": 1.0638297872340425,
"grad_norm": 0.7327796816825867,
"learning_rate": 0.0001975280816241959,
"loss": 0.0391,
"step": 250
},
{
"epoch": 1.1063829787234043,
"grad_norm": 0.8003076910972595,
"learning_rate": 0.00019732727626026305,
"loss": 0.0428,
"step": 260
},
{
"epoch": 1.148936170212766,
"grad_norm": 0.10251367837190628,
"learning_rate": 0.0001971187402964132,
"loss": 0.032,
"step": 270
},
{
"epoch": 1.1914893617021276,
"grad_norm": 0.45093855261802673,
"learning_rate": 0.00019690249029643162,
"loss": 0.0673,
"step": 280
},
{
"epoch": 1.2340425531914894,
"grad_norm": 0.4845767915248871,
"learning_rate": 0.0001966785434368211,
"loss": 0.033,
"step": 290
},
{
"epoch": 1.2765957446808511,
"grad_norm": 0.31195056438446045,
"learning_rate": 0.00019644691750543767,
"loss": 0.0261,
"step": 300
},
{
"epoch": 1.3191489361702127,
"grad_norm": 0.14839951694011688,
"learning_rate": 0.00019620763090007762,
"loss": 0.0298,
"step": 310
},
{
"epoch": 1.3617021276595744,
"grad_norm": 0.20573872327804565,
"learning_rate": 0.00019596070262701626,
"loss": 0.0155,
"step": 320
},
{
"epoch": 1.4042553191489362,
"grad_norm": 0.47702595591545105,
"learning_rate": 0.00019570615229949842,
"loss": 0.0369,
"step": 330
},
{
"epoch": 1.4468085106382977,
"grad_norm": 0.7073186039924622,
"learning_rate": 0.00019544400013618023,
"loss": 0.0302,
"step": 340
},
{
"epoch": 1.4893617021276595,
"grad_norm": 0.1539478451013565,
"learning_rate": 0.00019517426695952358,
"loss": 0.0223,
"step": 350
},
{
"epoch": 1.5319148936170213,
"grad_norm": 0.5202814340591431,
"learning_rate": 0.00019489697419414182,
"loss": 0.0263,
"step": 360
},
{
"epoch": 1.574468085106383,
"grad_norm": 0.968192458152771,
"learning_rate": 0.00019461214386509842,
"loss": 0.044,
"step": 370
},
{
"epoch": 1.6170212765957448,
"grad_norm": 0.5662522912025452,
"learning_rate": 0.00019431979859615726,
"loss": 0.0421,
"step": 380
},
{
"epoch": 1.6595744680851063,
"grad_norm": 0.42925137281417847,
"learning_rate": 0.00019401996160798573,
"loss": 0.0606,
"step": 390
},
{
"epoch": 1.702127659574468,
"grad_norm": 0.5803830027580261,
"learning_rate": 0.00019371265671631037,
"loss": 0.0392,
"step": 400
},
{
"epoch": 1.7446808510638299,
"grad_norm": 0.4235450327396393,
"learning_rate": 0.00019339790833002515,
"loss": 0.0286,
"step": 410
},
{
"epoch": 1.7872340425531914,
"grad_norm": 0.519207775592804,
"learning_rate": 0.00019307574144925287,
"loss": 0.0522,
"step": 420
},
{
"epoch": 1.8297872340425532,
"grad_norm": 0.2344844490289688,
"learning_rate": 0.00019274618166335912,
"loss": 0.0281,
"step": 430
},
{
"epoch": 1.872340425531915,
"grad_norm": 0.1990007758140564,
"learning_rate": 0.00019240925514892,
"loss": 0.0229,
"step": 440
},
{
"epoch": 1.9148936170212765,
"grad_norm": 0.10929415374994278,
"learning_rate": 0.00019206498866764288,
"loss": 0.0258,
"step": 450
},
{
"epoch": 1.9574468085106385,
"grad_norm": 0.4308103024959564,
"learning_rate": 0.00019171340956424074,
"loss": 0.0167,
"step": 460
},
{
"epoch": 2.0,
"grad_norm": 0.46525439620018005,
"learning_rate": 0.0001913545457642601,
"loss": 0.0283,
"step": 470
},
{
"epoch": 2.0425531914893615,
"grad_norm": 0.28837406635284424,
"learning_rate": 0.00019098842577186314,
"loss": 0.0137,
"step": 480
},
{
"epoch": 2.0851063829787235,
"grad_norm": 0.08533861488103867,
"learning_rate": 0.00019061507866756347,
"loss": 0.0182,
"step": 490
},
{
"epoch": 2.127659574468085,
"grad_norm": 0.3499375581741333,
"learning_rate": 0.00019023453410591635,
"loss": 0.0221,
"step": 500
},
{
"epoch": 2.1702127659574466,
"grad_norm": 0.07716694474220276,
"learning_rate": 0.00018984682231316333,
"loss": 0.0075,
"step": 510
},
{
"epoch": 2.2127659574468086,
"grad_norm": 0.3093757927417755,
"learning_rate": 0.00018945197408483123,
"loss": 0.0133,
"step": 520
},
{
"epoch": 2.25531914893617,
"grad_norm": 0.13492655754089355,
"learning_rate": 0.00018905002078328632,
"loss": 0.0184,
"step": 530
},
{
"epoch": 2.297872340425532,
"grad_norm": 0.07833054661750793,
"learning_rate": 0.000188640994335243,
"loss": 0.0109,
"step": 540
},
{
"epoch": 2.3404255319148937,
"grad_norm": 0.08865915983915329,
"learning_rate": 0.0001882249272292282,
"loss": 0.0121,
"step": 550
},
{
"epoch": 2.382978723404255,
"grad_norm": 0.31314581632614136,
"learning_rate": 0.00018780185251300046,
"loss": 0.0242,
"step": 560
},
{
"epoch": 2.425531914893617,
"grad_norm": 0.10387410968542099,
"learning_rate": 0.00018737180379092537,
"loss": 0.0285,
"step": 570
},
{
"epoch": 2.4680851063829787,
"grad_norm": 0.11755700409412384,
"learning_rate": 0.0001869348152213061,
"loss": 0.0281,
"step": 580
},
{
"epoch": 2.5106382978723403,
"grad_norm": 0.1438552439212799,
"learning_rate": 0.0001864909215136705,
"loss": 0.0216,
"step": 590
},
{
"epoch": 2.5531914893617023,
"grad_norm": 0.08940370380878448,
"learning_rate": 0.00018604015792601396,
"loss": 0.0226,
"step": 600
},
{
"epoch": 2.595744680851064,
"grad_norm": 0.19193390011787415,
"learning_rate": 0.00018558256026199896,
"loss": 0.0184,
"step": 610
},
{
"epoch": 2.6382978723404253,
"grad_norm": 0.4586654603481293,
"learning_rate": 0.00018511816486811134,
"loss": 0.0154,
"step": 620
},
{
"epoch": 2.6808510638297873,
"grad_norm": 0.05811255797743797,
"learning_rate": 0.00018464700863077312,
"loss": 0.0238,
"step": 630
},
{
"epoch": 2.723404255319149,
"grad_norm": 0.16280680894851685,
"learning_rate": 0.00018416912897341295,
"loss": 0.0253,
"step": 640
},
{
"epoch": 2.7659574468085104,
"grad_norm": 0.07031189650297165,
"learning_rate": 0.00018368456385349334,
"loss": 0.0146,
"step": 650
},
{
"epoch": 2.8085106382978724,
"grad_norm": 0.07851342856884003,
"learning_rate": 0.0001831933517594957,
"loss": 0.0301,
"step": 660
},
{
"epoch": 2.851063829787234,
"grad_norm": 0.11461298912763596,
"learning_rate": 0.0001826955317078636,
"loss": 0.0155,
"step": 670
},
{
"epoch": 2.8936170212765955,
"grad_norm": 0.1839868575334549,
"learning_rate": 0.00018219114323990345,
"loss": 0.0099,
"step": 680
},
{
"epoch": 2.9361702127659575,
"grad_norm": 0.04199373722076416,
"learning_rate": 0.00018168022641864377,
"loss": 0.0192,
"step": 690
},
{
"epoch": 2.978723404255319,
"grad_norm": 0.3203773498535156,
"learning_rate": 0.00018116282182565311,
"loss": 0.0272,
"step": 700
},
{
"epoch": 3.021276595744681,
"grad_norm": 0.327921599149704,
"learning_rate": 0.0001806389705578168,
"loss": 0.0081,
"step": 710
},
{
"epoch": 3.0638297872340425,
"grad_norm": 0.29155433177948,
"learning_rate": 0.00018010871422407236,
"loss": 0.0128,
"step": 720
},
{
"epoch": 3.106382978723404,
"grad_norm": 0.06926452368497849,
"learning_rate": 0.00017957209494210493,
"loss": 0.0171,
"step": 730
},
{
"epoch": 3.148936170212766,
"grad_norm": 0.08231089264154434,
"learning_rate": 0.0001790291553350016,
"loss": 0.0098,
"step": 740
},
{
"epoch": 3.1914893617021276,
"grad_norm": 0.14707215130329132,
"learning_rate": 0.0001784799385278661,
"loss": 0.0092,
"step": 750
},
{
"epoch": 3.2340425531914896,
"grad_norm": 0.2735896706581116,
"learning_rate": 0.00017792448814439333,
"loss": 0.0115,
"step": 760
},
{
"epoch": 3.276595744680851,
"grad_norm": 0.44960370659828186,
"learning_rate": 0.00017736284830340436,
"loss": 0.0195,
"step": 770
},
{
"epoch": 3.3191489361702127,
"grad_norm": 0.13413724303245544,
"learning_rate": 0.00017679506361534215,
"loss": 0.0187,
"step": 780
},
{
"epoch": 3.3617021276595747,
"grad_norm": 0.24698686599731445,
"learning_rate": 0.00017622117917872823,
"loss": 0.0125,
"step": 790
},
{
"epoch": 3.404255319148936,
"grad_norm": 0.48694342374801636,
"learning_rate": 0.00017564124057658056,
"loss": 0.0234,
"step": 800
},
{
"epoch": 3.4468085106382977,
"grad_norm": 0.6931429505348206,
"learning_rate": 0.00017505529387279277,
"loss": 0.0234,
"step": 810
},
{
"epoch": 3.4893617021276597,
"grad_norm": 0.13700473308563232,
"learning_rate": 0.00017446338560847568,
"loss": 0.0145,
"step": 820
},
{
"epoch": 3.5319148936170213,
"grad_norm": 0.3254775404930115,
"learning_rate": 0.00017386556279826021,
"loss": 0.0179,
"step": 830
},
{
"epoch": 3.574468085106383,
"grad_norm": 0.368379682302475,
"learning_rate": 0.00017326187292656333,
"loss": 0.013,
"step": 840
},
{
"epoch": 3.617021276595745,
"grad_norm": 0.2745888829231262,
"learning_rate": 0.00017265236394381633,
"loss": 0.0136,
"step": 850
},
{
"epoch": 3.6595744680851063,
"grad_norm": 0.0781714916229248,
"learning_rate": 0.00017203708426265614,
"loss": 0.0126,
"step": 860
},
{
"epoch": 3.702127659574468,
"grad_norm": 0.7518234848976135,
"learning_rate": 0.00017141608275408006,
"loss": 0.0134,
"step": 870
},
{
"epoch": 3.74468085106383,
"grad_norm": 0.4271518886089325,
"learning_rate": 0.00017078940874356392,
"loss": 0.0127,
"step": 880
},
{
"epoch": 3.7872340425531914,
"grad_norm": 0.34387120604515076,
"learning_rate": 0.00017015711200714414,
"loss": 0.0161,
"step": 890
},
{
"epoch": 3.829787234042553,
"grad_norm": 0.09641717374324799,
"learning_rate": 0.00016951924276746425,
"loss": 0.0185,
"step": 900
},
{
"epoch": 3.872340425531915,
"grad_norm": 0.44215765595436096,
"learning_rate": 0.00016887585168978562,
"loss": 0.0115,
"step": 910
},
{
"epoch": 3.9148936170212765,
"grad_norm": 0.06954783201217651,
"learning_rate": 0.0001682269898779632,
"loss": 0.0121,
"step": 920
},
{
"epoch": 3.9574468085106385,
"grad_norm": 0.21080243587493896,
"learning_rate": 0.00016757270887038654,
"loss": 0.0125,
"step": 930
},
{
"epoch": 4.0,
"grad_norm": 0.3382236063480377,
"learning_rate": 0.00016691306063588583,
"loss": 0.0109,
"step": 940
},
{
"epoch": 4.042553191489362,
"grad_norm": 0.36123183369636536,
"learning_rate": 0.00016624809756960444,
"loss": 0.0093,
"step": 950
},
{
"epoch": 4.085106382978723,
"grad_norm": 0.45228853821754456,
"learning_rate": 0.00016557787248883696,
"loss": 0.0117,
"step": 960
},
{
"epoch": 4.127659574468085,
"grad_norm": 0.2724202275276184,
"learning_rate": 0.00016490243862883413,
"loss": 0.0126,
"step": 970
},
{
"epoch": 4.170212765957447,
"grad_norm": 0.17904357612133026,
"learning_rate": 0.00016422184963857432,
"loss": 0.0103,
"step": 980
},
{
"epoch": 4.212765957446808,
"grad_norm": 0.4267734885215759,
"learning_rate": 0.00016353615957650236,
"loss": 0.0105,
"step": 990
},
{
"epoch": 4.25531914893617,
"grad_norm": 0.11816457659006119,
"learning_rate": 0.00016284542290623567,
"loss": 0.0097,
"step": 1000
},
{
"epoch": 4.297872340425532,
"grad_norm": 0.04166145250201225,
"learning_rate": 0.00016214969449223824,
"loss": 0.0116,
"step": 1010
},
{
"epoch": 4.340425531914893,
"grad_norm": 0.0687410831451416,
"learning_rate": 0.00016144902959546286,
"loss": 0.0088,
"step": 1020
},
{
"epoch": 4.382978723404255,
"grad_norm": 0.37416237592697144,
"learning_rate": 0.00016074348386896177,
"loss": 0.019,
"step": 1030
},
{
"epoch": 4.425531914893617,
"grad_norm": 0.06069188937544823,
"learning_rate": 0.00016003311335346636,
"loss": 0.0138,
"step": 1040
},
{
"epoch": 4.468085106382979,
"grad_norm": 0.0817495658993721,
"learning_rate": 0.00015931797447293552,
"loss": 0.0084,
"step": 1050
},
{
"epoch": 4.51063829787234,
"grad_norm": 0.09787007421255112,
"learning_rate": 0.00015859812403007443,
"loss": 0.009,
"step": 1060
},
{
"epoch": 4.553191489361702,
"grad_norm": 0.06179153174161911,
"learning_rate": 0.0001578736192018224,
"loss": 0.008,
"step": 1070
},
{
"epoch": 4.595744680851064,
"grad_norm": 0.3092339038848877,
"learning_rate": 0.00015714451753481168,
"loss": 0.0131,
"step": 1080
},
{
"epoch": 4.638297872340425,
"grad_norm": 0.06405780464410782,
"learning_rate": 0.0001564108769407962,
"loss": 0.0122,
"step": 1090
},
{
"epoch": 4.680851063829787,
"grad_norm": 0.21473410725593567,
"learning_rate": 0.00015567275569205218,
"loss": 0.0117,
"step": 1100
},
{
"epoch": 4.723404255319149,
"grad_norm": 0.3080317974090576,
"learning_rate": 0.00015493021241674918,
"loss": 0.011,
"step": 1110
},
{
"epoch": 4.76595744680851,
"grad_norm": 0.6501839756965637,
"learning_rate": 0.0001541833060942937,
"loss": 0.0124,
"step": 1120
},
{
"epoch": 4.808510638297872,
"grad_norm": 0.04628886282444,
"learning_rate": 0.00015343209605064422,
"loss": 0.0082,
"step": 1130
},
{
"epoch": 4.851063829787234,
"grad_norm": 0.059793177992105484,
"learning_rate": 0.00015267664195359917,
"loss": 0.0072,
"step": 1140
},
{
"epoch": 4.8936170212765955,
"grad_norm": 0.052162788808345795,
"learning_rate": 0.00015191700380805752,
"loss": 0.0095,
"step": 1150
},
{
"epoch": 4.9361702127659575,
"grad_norm": 0.06147542968392372,
"learning_rate": 0.00015115324195125274,
"loss": 0.0098,
"step": 1160
},
{
"epoch": 4.9787234042553195,
"grad_norm": 0.6586833000183105,
"learning_rate": 0.00015038541704796003,
"loss": 0.0139,
"step": 1170
},
{
"epoch": 5.0212765957446805,
"grad_norm": 0.2920892834663391,
"learning_rate": 0.0001496135900856782,
"loss": 0.0083,
"step": 1180
},
{
"epoch": 5.0638297872340425,
"grad_norm": 0.32393306493759155,
"learning_rate": 0.0001488378223697851,
"loss": 0.0155,
"step": 1190
},
{
"epoch": 5.1063829787234045,
"grad_norm": 0.49029844999313354,
"learning_rate": 0.00014805817551866838,
"loss": 0.0109,
"step": 1200
},
{
"epoch": 5.148936170212766,
"grad_norm": 0.05497799441218376,
"learning_rate": 0.00014727471145883127,
"loss": 0.0095,
"step": 1210
},
{
"epoch": 5.191489361702128,
"grad_norm": 0.4540445804595947,
"learning_rate": 0.00014648749241997363,
"loss": 0.0106,
"step": 1220
},
{
"epoch": 5.23404255319149,
"grad_norm": 0.16598157584667206,
"learning_rate": 0.00014569658093004935,
"loss": 0.0072,
"step": 1230
},
{
"epoch": 5.276595744680851,
"grad_norm": 0.07160704582929611,
"learning_rate": 0.0001449020398102996,
"loss": 0.0108,
"step": 1240
},
{
"epoch": 5.319148936170213,
"grad_norm": 0.197789266705513,
"learning_rate": 0.00014410393217026318,
"loss": 0.0118,
"step": 1250
},
{
"epoch": 5.361702127659575,
"grad_norm": 0.07983817905187607,
"learning_rate": 0.00014330232140276366,
"loss": 0.0076,
"step": 1260
},
{
"epoch": 5.404255319148936,
"grad_norm": 0.0746329054236412,
"learning_rate": 0.00014249727117887425,
"loss": 0.0089,
"step": 1270
},
{
"epoch": 5.446808510638298,
"grad_norm": 0.09392493963241577,
"learning_rate": 0.00014168884544286053,
"loss": 0.0103,
"step": 1280
},
{
"epoch": 5.48936170212766,
"grad_norm": 0.18386998772621155,
"learning_rate": 0.0001408771084071012,
"loss": 0.0096,
"step": 1290
},
{
"epoch": 5.531914893617021,
"grad_norm": 0.07400283962488174,
"learning_rate": 0.00014006212454698797,
"loss": 0.0083,
"step": 1300
},
{
"epoch": 5.574468085106383,
"grad_norm": 0.06513144075870514,
"learning_rate": 0.00013924395859580432,
"loss": 0.0093,
"step": 1310
},
{
"epoch": 5.617021276595745,
"grad_norm": 0.6950928568840027,
"learning_rate": 0.00013842267553958371,
"loss": 0.0073,
"step": 1320
},
{
"epoch": 5.659574468085106,
"grad_norm": 0.05320321023464203,
"learning_rate": 0.00013759834061194794,
"loss": 0.0098,
"step": 1330
},
{
"epoch": 5.702127659574468,
"grad_norm": 0.17569933831691742,
"learning_rate": 0.00013677101928892554,
"loss": 0.0075,
"step": 1340
},
{
"epoch": 5.74468085106383,
"grad_norm": 0.052122730761766434,
"learning_rate": 0.00013594077728375128,
"loss": 0.0107,
"step": 1350
},
{
"epoch": 5.787234042553192,
"grad_norm": 0.2108752280473709,
"learning_rate": 0.00013510768054164653,
"loss": 0.0119,
"step": 1360
},
{
"epoch": 5.829787234042553,
"grad_norm": 0.047486983239650726,
"learning_rate": 0.00013427179523458127,
"loss": 0.0092,
"step": 1370
},
{
"epoch": 5.872340425531915,
"grad_norm": 0.043320391327142715,
"learning_rate": 0.0001334331877560182,
"loss": 0.0081,
"step": 1380
},
{
"epoch": 5.914893617021277,
"grad_norm": 0.09155077487230301,
"learning_rate": 0.00013259192471563912,
"loss": 0.0091,
"step": 1390
},
{
"epoch": 5.957446808510638,
"grad_norm": 0.049143675714731216,
"learning_rate": 0.00013174807293405428,
"loss": 0.0089,
"step": 1400
},
{
"epoch": 6.0,
"grad_norm": 0.07365540415048599,
"learning_rate": 0.00013090169943749476,
"loss": 0.0068,
"step": 1410
},
{
"epoch": 6.042553191489362,
"grad_norm": 0.11437718570232391,
"learning_rate": 0.00013005287145248878,
"loss": 0.0064,
"step": 1420
},
{
"epoch": 6.085106382978723,
"grad_norm": 0.06010650098323822,
"learning_rate": 0.0001292016564005219,
"loss": 0.0074,
"step": 1430
},
{
"epoch": 6.127659574468085,
"grad_norm": 0.04595167934894562,
"learning_rate": 0.0001283481218926818,
"loss": 0.0066,
"step": 1440
},
{
"epoch": 6.170212765957447,
"grad_norm": 0.05351310595870018,
"learning_rate": 0.00012749233572428804,
"loss": 0.0097,
"step": 1450
},
{
"epoch": 6.212765957446808,
"grad_norm": 0.13630953431129456,
"learning_rate": 0.00012663436586950714,
"loss": 0.0079,
"step": 1460
},
{
"epoch": 6.25531914893617,
"grad_norm": 0.17622588574886322,
"learning_rate": 0.00012577428047595344,
"loss": 0.0084,
"step": 1470
},
{
"epoch": 6.297872340425532,
"grad_norm": 0.050954531878232956,
"learning_rate": 0.0001249121478592762,
"loss": 0.0077,
"step": 1480
},
{
"epoch": 6.340425531914893,
"grad_norm": 0.3051726818084717,
"learning_rate": 0.0001240480364977335,
"loss": 0.0085,
"step": 1490
},
{
"epoch": 6.382978723404255,
"grad_norm": 0.30509302020072937,
"learning_rate": 0.00012318201502675285,
"loss": 0.0092,
"step": 1500
},
{
"epoch": 6.425531914893617,
"grad_norm": 0.09164142608642578,
"learning_rate": 0.00012231415223347972,
"loss": 0.008,
"step": 1510
},
{
"epoch": 6.468085106382979,
"grad_norm": 0.05406223237514496,
"learning_rate": 0.0001214445170513139,
"loss": 0.0078,
"step": 1520
},
{
"epoch": 6.51063829787234,
"grad_norm": 0.05845744535326958,
"learning_rate": 0.00012057317855443395,
"loss": 0.0092,
"step": 1530
},
{
"epoch": 6.553191489361702,
"grad_norm": 0.05021122843027115,
"learning_rate": 0.00011970020595231101,
"loss": 0.007,
"step": 1540
},
{
"epoch": 6.595744680851064,
"grad_norm": 0.10315235704183578,
"learning_rate": 0.00011882566858421135,
"loss": 0.0068,
"step": 1550
},
{
"epoch": 6.638297872340425,
"grad_norm": 0.08750782907009125,
"learning_rate": 0.00011794963591368893,
"loss": 0.009,
"step": 1560
},
{
"epoch": 6.680851063829787,
"grad_norm": 0.05412838235497475,
"learning_rate": 0.0001170721775230679,
"loss": 0.0071,
"step": 1570
},
{
"epoch": 6.723404255319149,
"grad_norm": 0.17292432487010956,
"learning_rate": 0.00011619336310791586,
"loss": 0.0091,
"step": 1580
},
{
"epoch": 6.76595744680851,
"grad_norm": 0.05503688380122185,
"learning_rate": 0.00011531326247150803,
"loss": 0.0069,
"step": 1590
},
{
"epoch": 6.808510638297872,
"grad_norm": 0.05121155083179474,
"learning_rate": 0.00011443194551928266,
"loss": 0.008,
"step": 1600
},
{
"epoch": 6.851063829787234,
"grad_norm": 0.0626005157828331,
"learning_rate": 0.00011354948225328877,
"loss": 0.0065,
"step": 1610
},
{
"epoch": 6.8936170212765955,
"grad_norm": 0.058921247720718384,
"learning_rate": 0.0001126659427666257,
"loss": 0.0078,
"step": 1620
},
{
"epoch": 6.9361702127659575,
"grad_norm": 0.058523211628198624,
"learning_rate": 0.00011178139723787597,
"loss": 0.008,
"step": 1630
},
{
"epoch": 6.9787234042553195,
"grad_norm": 0.18594586849212646,
"learning_rate": 0.00011089591592553082,
"loss": 0.0076,
"step": 1640
},
{
"epoch": 7.0212765957446805,
"grad_norm": 0.053747180849313736,
"learning_rate": 0.00011000956916240985,
"loss": 0.0074,
"step": 1650
},
{
"epoch": 7.0638297872340425,
"grad_norm": 0.03964696079492569,
"learning_rate": 0.00010912242735007441,
"loss": 0.0071,
"step": 1660
},
{
"epoch": 7.1063829787234045,
"grad_norm": 0.05952566862106323,
"learning_rate": 0.00010823456095323579,
"loss": 0.0065,
"step": 1670
},
{
"epoch": 7.148936170212766,
"grad_norm": 0.21424749493598938,
"learning_rate": 0.00010734604049415822,
"loss": 0.0075,
"step": 1680
},
{
"epoch": 7.191489361702128,
"grad_norm": 0.03922433406114578,
"learning_rate": 0.0001064569365470574,
"loss": 0.0071,
"step": 1690
},
{
"epoch": 7.23404255319149,
"grad_norm": 0.05505215749144554,
"learning_rate": 0.00010556731973249485,
"loss": 0.0061,
"step": 1700
},
{
"epoch": 7.276595744680851,
"grad_norm": 0.03699268028140068,
"learning_rate": 0.00010467726071176853,
"loss": 0.0075,
"step": 1710
},
{
"epoch": 7.319148936170213,
"grad_norm": 0.04546520113945007,
"learning_rate": 0.00010378683018130047,
"loss": 0.0072,
"step": 1720
},
{
"epoch": 7.361702127659575,
"grad_norm": 0.056984953582286835,
"learning_rate": 0.0001028960988670212,
"loss": 0.007,
"step": 1730
},
{
"epoch": 7.404255319148936,
"grad_norm": 0.24714773893356323,
"learning_rate": 0.00010200513751875227,
"loss": 0.0074,
"step": 1740
},
{
"epoch": 7.446808510638298,
"grad_norm": 0.06558862328529358,
"learning_rate": 0.00010111401690458654,
"loss": 0.0064,
"step": 1750
},
{
"epoch": 7.48936170212766,
"grad_norm": 0.06254340708255768,
"learning_rate": 0.00010022280780526725,
"loss": 0.0076,
"step": 1760
},
{
"epoch": 7.531914893617021,
"grad_norm": 0.08606445789337158,
"learning_rate": 9.93315810085658e-05,
"loss": 0.0076,
"step": 1770
},
{
"epoch": 7.574468085106383,
"grad_norm": 0.051956657320261,
"learning_rate": 9.844040730365936e-05,
"loss": 0.0073,
"step": 1780
},
{
"epoch": 7.617021276595745,
"grad_norm": 0.24819940328598022,
"learning_rate": 9.754935747550804e-05,
"loss": 0.0077,
"step": 1790
},
{
"epoch": 7.659574468085106,
"grad_norm": 0.039045918732881546,
"learning_rate": 9.665850229923258e-05,
"loss": 0.0071,
"step": 1800
},
{
"epoch": 7.702127659574468,
"grad_norm": 0.04438428208231926,
"learning_rate": 9.57679125344927e-05,
"loss": 0.0074,
"step": 1810
},
{
"epoch": 7.74468085106383,
"grad_norm": 0.31067439913749695,
"learning_rate": 9.487765891986682e-05,
"loss": 0.0087,
"step": 1820
},
{
"epoch": 7.787234042553192,
"grad_norm": 0.06251853704452515,
"learning_rate": 9.398781216723331e-05,
"loss": 0.0069,
"step": 1830
},
{
"epoch": 7.829787234042553,
"grad_norm": 0.04706185683608055,
"learning_rate": 9.309844295615389e-05,
"loss": 0.0072,
"step": 1840
},
{
"epoch": 7.872340425531915,
"grad_norm": 0.03839968144893646,
"learning_rate": 9.220962192825968e-05,
"loss": 0.0069,
"step": 1850
},
{
"epoch": 7.914893617021277,
"grad_norm": 0.03722318261861801,
"learning_rate": 9.132141968164026e-05,
"loss": 0.0069,
"step": 1860
},
{
"epoch": 7.957446808510638,
"grad_norm": 0.0543668158352375,
"learning_rate": 9.043390676523604e-05,
"loss": 0.0076,
"step": 1870
},
{
"epoch": 8.0,
"grad_norm": 0.051696889102458954,
"learning_rate": 8.954715367323468e-05,
"loss": 0.0066,
"step": 1880
},
{
"epoch": 8.042553191489361,
"grad_norm": 0.08786992728710175,
"learning_rate": 8.866123083947182e-05,
"loss": 0.0062,
"step": 1890
},
{
"epoch": 8.085106382978724,
"grad_norm": 0.3556790351867676,
"learning_rate": 8.777620863183657e-05,
"loss": 0.0079,
"step": 1900
},
{
"epoch": 8.127659574468085,
"grad_norm": 0.08049122244119644,
"learning_rate": 8.689215734668232e-05,
"loss": 0.0064,
"step": 1910
},
{
"epoch": 8.170212765957446,
"grad_norm": 0.05408492311835289,
"learning_rate": 8.600914720324316e-05,
"loss": 0.0077,
"step": 1920
},
{
"epoch": 8.212765957446809,
"grad_norm": 0.034946855157613754,
"learning_rate": 8.512724833805634e-05,
"loss": 0.0067,
"step": 1930
},
{
"epoch": 8.25531914893617,
"grad_norm": 0.05422681197524071,
"learning_rate": 8.424653079939156e-05,
"loss": 0.0062,
"step": 1940
},
{
"epoch": 8.297872340425531,
"grad_norm": 0.06405791640281677,
"learning_rate": 8.336706454168701e-05,
"loss": 0.0064,
"step": 1950
},
{
"epoch": 8.340425531914894,
"grad_norm": 0.08694509416818619,
"learning_rate": 8.248891941999297e-05,
"loss": 0.006,
"step": 1960
},
{
"epoch": 8.382978723404255,
"grad_norm": 0.05589594319462776,
"learning_rate": 8.161216518442334e-05,
"loss": 0.0067,
"step": 1970
},
{
"epoch": 8.425531914893616,
"grad_norm": 0.05914654955267906,
"learning_rate": 8.073687147461547e-05,
"loss": 0.0065,
"step": 1980
},
{
"epoch": 8.46808510638298,
"grad_norm": 0.05237606540322304,
"learning_rate": 7.98631078141987e-05,
"loss": 0.0071,
"step": 1990
},
{
"epoch": 8.51063829787234,
"grad_norm": 0.056138359010219574,
"learning_rate": 7.89909436052722e-05,
"loss": 0.0079,
"step": 2000
},
{
"epoch": 8.553191489361701,
"grad_norm": 0.06285829097032547,
"learning_rate": 7.812044812289249e-05,
"loss": 0.0064,
"step": 2010
},
{
"epoch": 8.595744680851064,
"grad_norm": 0.04014954715967178,
"learning_rate": 7.72516905095709e-05,
"loss": 0.0072,
"step": 2020
},
{
"epoch": 8.638297872340425,
"grad_norm": 0.04423344135284424,
"learning_rate": 7.638473976978177e-05,
"loss": 0.0065,
"step": 2030
},
{
"epoch": 8.680851063829786,
"grad_norm": 0.05287999287247658,
"learning_rate": 7.55196647644814e-05,
"loss": 0.0081,
"step": 2040
},
{
"epoch": 8.72340425531915,
"grad_norm": 0.056493304669857025,
"learning_rate": 7.465653420563845e-05,
"loss": 0.0067,
"step": 2050
},
{
"epoch": 8.76595744680851,
"grad_norm": 0.057281821966171265,
"learning_rate": 7.379541665077643e-05,
"loss": 0.0078,
"step": 2060
},
{
"epoch": 8.808510638297872,
"grad_norm": 0.053731102496385574,
"learning_rate": 7.293638049752812e-05,
"loss": 0.0066,
"step": 2070
},
{
"epoch": 8.851063829787234,
"grad_norm": 0.18697482347488403,
"learning_rate": 7.207949397820278e-05,
"loss": 0.0069,
"step": 2080
},
{
"epoch": 8.893617021276595,
"grad_norm": 0.0387713797390461,
"learning_rate": 7.122482515436661e-05,
"loss": 0.007,
"step": 2090
},
{
"epoch": 8.936170212765958,
"grad_norm": 0.04441935196518898,
"learning_rate": 7.037244191143661e-05,
"loss": 0.0067,
"step": 2100
},
{
"epoch": 8.97872340425532,
"grad_norm": 0.05758345127105713,
"learning_rate": 6.952241195328868e-05,
"loss": 0.0065,
"step": 2110
},
{
"epoch": 9.02127659574468,
"grad_norm": 0.050706226378679276,
"learning_rate": 6.867480279687974e-05,
"loss": 0.0063,
"step": 2120
},
{
"epoch": 9.063829787234043,
"grad_norm": 0.05180887505412102,
"learning_rate": 6.782968176688514e-05,
"loss": 0.0062,
"step": 2130
},
{
"epoch": 9.106382978723405,
"grad_norm": 0.05492401868104935,
"learning_rate": 6.6987115990351e-05,
"loss": 0.006,
"step": 2140
},
{
"epoch": 9.148936170212766,
"grad_norm": 0.053439777344465256,
"learning_rate": 6.614717239136246e-05,
"loss": 0.0066,
"step": 2150
},
{
"epoch": 9.191489361702128,
"grad_norm": 0.15650008618831635,
"learning_rate": 6.530991768572794e-05,
"loss": 0.006,
"step": 2160
},
{
"epoch": 9.23404255319149,
"grad_norm": 0.04846300184726715,
"learning_rate": 6.447541837568e-05,
"loss": 0.0068,
"step": 2170
},
{
"epoch": 9.27659574468085,
"grad_norm": 0.047157011926174164,
"learning_rate": 6.364374074459307e-05,
"loss": 0.006,
"step": 2180
},
{
"epoch": 9.319148936170214,
"grad_norm": 0.047358132898807526,
"learning_rate": 6.281495085171869e-05,
"loss": 0.0058,
"step": 2190
},
{
"epoch": 9.361702127659575,
"grad_norm": 0.11200093477964401,
"learning_rate": 6.198911452693853e-05,
"loss": 0.007,
"step": 2200
},
{
"epoch": 9.404255319148936,
"grad_norm": 0.06481984257698059,
"learning_rate": 6.116629736553552e-05,
"loss": 0.0069,
"step": 2210
},
{
"epoch": 9.446808510638299,
"grad_norm": 0.04743931442499161,
"learning_rate": 6.0346564722983736e-05,
"loss": 0.0072,
"step": 2220
},
{
"epoch": 9.48936170212766,
"grad_norm": 0.07307924330234528,
"learning_rate": 5.952998170975724e-05,
"loss": 0.0062,
"step": 2230
},
{
"epoch": 9.53191489361702,
"grad_norm": 0.05650079995393753,
"learning_rate": 5.871661318615848e-05,
"loss": 0.0061,
"step": 2240
},
{
"epoch": 9.574468085106384,
"grad_norm": 0.057734813541173935,
"learning_rate": 5.790652375716652e-05,
"loss": 0.0068,
"step": 2250
},
{
"epoch": 9.617021276595745,
"grad_norm": 0.05608903244137764,
"learning_rate": 5.709977776730537e-05,
"loss": 0.0071,
"step": 2260
},
{
"epoch": 9.659574468085106,
"grad_norm": 0.07133158296346664,
"learning_rate": 5.62964392955335e-05,
"loss": 0.0067,
"step": 2270
},
{
"epoch": 9.702127659574469,
"grad_norm": 0.2167043685913086,
"learning_rate": 5.549657215015367e-05,
"loss": 0.0067,
"step": 2280
},
{
"epoch": 9.74468085106383,
"grad_norm": 0.06523007154464722,
"learning_rate": 5.470023986374516e-05,
"loss": 0.0068,
"step": 2290
},
{
"epoch": 9.787234042553191,
"grad_norm": 0.04895370826125145,
"learning_rate": 5.39075056881172e-05,
"loss": 0.0058,
"step": 2300
},
{
"epoch": 9.829787234042554,
"grad_norm": 0.056433409452438354,
"learning_rate": 5.31184325892849e-05,
"loss": 0.0067,
"step": 2310
},
{
"epoch": 9.872340425531915,
"grad_norm": 0.049970466643571854,
"learning_rate": 5.233308324246805e-05,
"loss": 0.0058,
"step": 2320
},
{
"epoch": 9.914893617021276,
"grad_norm": 0.08367093652486801,
"learning_rate": 5.155152002711285e-05,
"loss": 0.0063,
"step": 2330
},
{
"epoch": 9.957446808510639,
"grad_norm": 0.07821632921695709,
"learning_rate": 5.077380502193725e-05,
"loss": 0.0071,
"step": 2340
},
{
"epoch": 10.0,
"grad_norm": 0.04344159737229347,
"learning_rate": 5.000000000000002e-05,
"loss": 0.0078,
"step": 2350
},
{
"epoch": 10.042553191489361,
"grad_norm": 0.06118736043572426,
"learning_rate": 4.923016642379412e-05,
"loss": 0.0061,
"step": 2360
},
{
"epoch": 10.085106382978724,
"grad_norm": 0.07500752061605453,
"learning_rate": 4.8464365440365044e-05,
"loss": 0.006,
"step": 2370
},
{
"epoch": 10.127659574468085,
"grad_norm": 0.05192973092198372,
"learning_rate": 4.7702657876453616e-05,
"loss": 0.0059,
"step": 2380
},
{
"epoch": 10.170212765957446,
"grad_norm": 0.07265878468751907,
"learning_rate": 4.6945104233665006e-05,
"loss": 0.006,
"step": 2390
},
{
"epoch": 10.212765957446809,
"grad_norm": 0.07713906466960907,
"learning_rate": 4.6191764683662744e-05,
"loss": 0.0065,
"step": 2400
},
{
"epoch": 10.25531914893617,
"grad_norm": 0.1284199059009552,
"learning_rate": 4.5442699063389705e-05,
"loss": 0.0061,
"step": 2410
},
{
"epoch": 10.297872340425531,
"grad_norm": 0.03467699885368347,
"learning_rate": 4.469796687031502e-05,
"loss": 0.0056,
"step": 2420
},
{
"epoch": 10.340425531914894,
"grad_norm": 0.05337784066796303,
"learning_rate": 4.395762725770852e-05,
"loss": 0.006,
"step": 2430
},
{
"epoch": 10.382978723404255,
"grad_norm": 0.054092224687337875,
"learning_rate": 4.322173902994212e-05,
"loss": 0.0065,
"step": 2440
},
{
"epoch": 10.425531914893616,
"grad_norm": 0.044108908623456955,
"learning_rate": 4.249036063781896e-05,
"loss": 0.0058,
"step": 2450
},
{
"epoch": 10.46808510638298,
"grad_norm": 0.1287708580493927,
"learning_rate": 4.176355017393099e-05,
"loss": 0.0062,
"step": 2460
},
{
"epoch": 10.51063829787234,
"grad_norm": 0.03487354889512062,
"learning_rate": 4.10413653680444e-05,
"loss": 0.0065,
"step": 2470
},
{
"epoch": 10.553191489361701,
"grad_norm": 0.06098255515098572,
"learning_rate": 4.03238635825146e-05,
"loss": 0.0058,
"step": 2480
},
{
"epoch": 10.595744680851064,
"grad_norm": 0.04999317228794098,
"learning_rate": 3.961110180772955e-05,
"loss": 0.0059,
"step": 2490
},
{
"epoch": 10.638297872340425,
"grad_norm": 0.05019402131438255,
"learning_rate": 3.890313665758348e-05,
"loss": 0.0061,
"step": 2500
},
{
"epoch": 10.680851063829786,
"grad_norm": 0.0514717735350132,
"learning_rate": 3.8200024364979815e-05,
"loss": 0.006,
"step": 2510
},
{
"epoch": 10.72340425531915,
"grad_norm": 0.048151805996894836,
"learning_rate": 3.750182077736486e-05,
"loss": 0.0064,
"step": 2520
},
{
"epoch": 10.76595744680851,
"grad_norm": 0.08788628876209259,
"learning_rate": 3.6808581352291716e-05,
"loss": 0.006,
"step": 2530
},
{
"epoch": 10.808510638297872,
"grad_norm": 0.08574005961418152,
"learning_rate": 3.612036115301551e-05,
"loss": 0.0064,
"step": 2540
},
{
"epoch": 10.851063829787234,
"grad_norm": 0.05240662768483162,
"learning_rate": 3.543721484411976e-05,
"loss": 0.0065,
"step": 2550
},
{
"epoch": 10.893617021276595,
"grad_norm": 0.05454961955547333,
"learning_rate": 3.475919668717422e-05,
"loss": 0.006,
"step": 2560
},
{
"epoch": 10.936170212765958,
"grad_norm": 0.052435703575611115,
"learning_rate": 3.408636053642528e-05,
"loss": 0.0058,
"step": 2570
},
{
"epoch": 10.97872340425532,
"grad_norm": 0.09059314429759979,
"learning_rate": 3.3418759834518056e-05,
"loss": 0.0061,
"step": 2580
},
{
"epoch": 11.02127659574468,
"grad_norm": 0.04699910059571266,
"learning_rate": 3.275644760825168e-05,
"loss": 0.006,
"step": 2590
},
{
"epoch": 11.063829787234043,
"grad_norm": 0.0523945577442646,
"learning_rate": 3.209947646436752e-05,
"loss": 0.0053,
"step": 2600
},
{
"epoch": 11.106382978723405,
"grad_norm": 0.09489481896162033,
"learning_rate": 3.1447898585370384e-05,
"loss": 0.0061,
"step": 2610
},
{
"epoch": 11.148936170212766,
"grad_norm": 0.04296072944998741,
"learning_rate": 3.0801765725384066e-05,
"loss": 0.006,
"step": 2620
},
{
"epoch": 11.191489361702128,
"grad_norm": 0.05972497537732124,
"learning_rate": 3.0161129206040284e-05,
"loss": 0.0061,
"step": 2630
},
{
"epoch": 11.23404255319149,
"grad_norm": 0.06595079600811005,
"learning_rate": 2.9526039912402503e-05,
"loss": 0.0058,
"step": 2640
},
{
"epoch": 11.27659574468085,
"grad_norm": 0.04778233543038368,
"learning_rate": 2.889654828892393e-05,
"loss": 0.0059,
"step": 2650
},
{
"epoch": 11.319148936170214,
"grad_norm": 0.06364385783672333,
"learning_rate": 2.8272704335441058e-05,
"loss": 0.0059,
"step": 2660
},
{
"epoch": 11.361702127659575,
"grad_norm": 0.04678649827837944,
"learning_rate": 2.7654557603201957e-05,
"loss": 0.0058,
"step": 2670
},
{
"epoch": 11.404255319148936,
"grad_norm": 0.06067119911313057,
"learning_rate": 2.704215719093066e-05,
"loss": 0.0062,
"step": 2680
},
{
"epoch": 11.446808510638299,
"grad_norm": 0.05602847784757614,
"learning_rate": 2.643555174092728e-05,
"loss": 0.0064,
"step": 2690
},
{
"epoch": 11.48936170212766,
"grad_norm": 0.06131875514984131,
"learning_rate": 2.5834789435204243e-05,
"loss": 0.0059,
"step": 2700
},
{
"epoch": 11.53191489361702,
"grad_norm": 0.049852412194013596,
"learning_rate": 2.5239917991659512e-05,
"loss": 0.006,
"step": 2710
},
{
"epoch": 11.574468085106384,
"grad_norm": 0.06347551941871643,
"learning_rate": 2.4650984660286124e-05,
"loss": 0.0058,
"step": 2720
},
{
"epoch": 11.617021276595745,
"grad_norm": 0.0802001804113388,
"learning_rate": 2.4068036219419432e-05,
"loss": 0.0063,
"step": 2730
},
{
"epoch": 11.659574468085106,
"grad_norm": 0.15571121871471405,
"learning_rate": 2.349111897202134e-05,
"loss": 0.0061,
"step": 2740
},
{
"epoch": 11.702127659574469,
"grad_norm": 0.05023673549294472,
"learning_rate": 2.2920278742002676e-05,
"loss": 0.0058,
"step": 2750
},
{
"epoch": 11.74468085106383,
"grad_norm": 0.06210224702954292,
"learning_rate": 2.235556087058328e-05,
"loss": 0.0057,
"step": 2760
},
{
"epoch": 11.787234042553191,
"grad_norm": 0.054550446569919586,
"learning_rate": 2.1797010212690795e-05,
"loss": 0.0059,
"step": 2770
},
{
"epoch": 11.829787234042554,
"grad_norm": 0.06770533323287964,
"learning_rate": 2.1244671133397753e-05,
"loss": 0.0058,
"step": 2780
},
{
"epoch": 11.872340425531915,
"grad_norm": 0.07317465543746948,
"learning_rate": 2.069858750439768e-05,
"loss": 0.0061,
"step": 2790
},
{
"epoch": 11.914893617021276,
"grad_norm": 0.04136810824275017,
"learning_rate": 2.0158802700520574e-05,
"loss": 0.0061,
"step": 2800
},
{
"epoch": 11.957446808510639,
"grad_norm": 0.06249478831887245,
"learning_rate": 1.9625359596287496e-05,
"loss": 0.0061,
"step": 2810
},
{
"epoch": 12.0,
"grad_norm": 0.07229502499103546,
"learning_rate": 1.9098300562505266e-05,
"loss": 0.0061,
"step": 2820
},
{
"epoch": 12.042553191489361,
"grad_norm": 0.05772854760289192,
"learning_rate": 1.857766746290084e-05,
"loss": 0.0059,
"step": 2830
},
{
"epoch": 12.085106382978724,
"grad_norm": 0.05857497826218605,
"learning_rate": 1.8063501650796288e-05,
"loss": 0.0053,
"step": 2840
},
{
"epoch": 12.127659574468085,
"grad_norm": 0.0488237701356411,
"learning_rate": 1.7555843965823992e-05,
"loss": 0.006,
"step": 2850
},
{
"epoch": 12.170212765957446,
"grad_norm": 0.04676933214068413,
"learning_rate": 1.705473473068282e-05,
"loss": 0.0056,
"step": 2860
},
{
"epoch": 12.212765957446809,
"grad_norm": 0.04721114784479141,
"learning_rate": 1.6560213747935503e-05,
"loss": 0.0062,
"step": 2870
},
{
"epoch": 12.25531914893617,
"grad_norm": 0.055576860904693604,
"learning_rate": 1.6072320296846898e-05,
"loss": 0.0058,
"step": 2880
},
{
"epoch": 12.297872340425531,
"grad_norm": 0.06054997444152832,
"learning_rate": 1.5591093130264334e-05,
"loss": 0.0057,
"step": 2890
},
{
"epoch": 12.340425531914894,
"grad_norm": 0.054234158247709274,
"learning_rate": 1.5116570471539293e-05,
"loss": 0.0057,
"step": 2900
},
{
"epoch": 12.382978723404255,
"grad_norm": 0.05556226149201393,
"learning_rate": 1.4648790011491542e-05,
"loss": 0.0057,
"step": 2910
},
{
"epoch": 12.425531914893616,
"grad_norm": 0.055104803293943405,
"learning_rate": 1.4187788905415334e-05,
"loss": 0.0059,
"step": 2920
},
{
"epoch": 12.46808510638298,
"grad_norm": 0.04338991269469261,
"learning_rate": 1.3733603770128112e-05,
"loss": 0.0059,
"step": 2930
},
{
"epoch": 12.51063829787234,
"grad_norm": 0.04588828980922699,
"learning_rate": 1.3286270681062274e-05,
"loss": 0.006,
"step": 2940
},
{
"epoch": 12.553191489361701,
"grad_norm": 0.05913781002163887,
"learning_rate": 1.2845825169399507e-05,
"loss": 0.0063,
"step": 2950
},
{
"epoch": 12.595744680851064,
"grad_norm": 0.056119441986083984,
"learning_rate": 1.2412302219248805e-05,
"loss": 0.0059,
"step": 2960
},
{
"epoch": 12.638297872340425,
"grad_norm": 0.05296126753091812,
"learning_rate": 1.1985736264867509e-05,
"loss": 0.006,
"step": 2970
},
{
"epoch": 12.680851063829786,
"grad_norm": 0.04919581860303879,
"learning_rate": 1.1566161187926439e-05,
"loss": 0.006,
"step": 2980
},
{
"epoch": 12.72340425531915,
"grad_norm": 0.04949197918176651,
"learning_rate": 1.115361031481853e-05,
"loss": 0.0057,
"step": 2990
},
{
"epoch": 12.76595744680851,
"grad_norm": 0.044637832790613174,
"learning_rate": 1.0748116414011888e-05,
"loss": 0.0055,
"step": 3000
},
{
"epoch": 12.808510638297872,
"grad_norm": 0.04936418682336807,
"learning_rate": 1.0349711693446918e-05,
"loss": 0.0059,
"step": 3010
},
{
"epoch": 12.851063829787234,
"grad_norm": 0.0362602099776268,
"learning_rate": 9.958427797978166e-06,
"loss": 0.0058,
"step": 3020
},
{
"epoch": 12.893617021276595,
"grad_norm": 0.05616355687379837,
"learning_rate": 9.574295806860767e-06,
"loss": 0.0059,
"step": 3030
},
{
"epoch": 12.936170212765958,
"grad_norm": 0.048922713845968246,
"learning_rate": 9.197346231281845e-06,
"loss": 0.0058,
"step": 3040
},
{
"epoch": 12.97872340425532,
"grad_norm": 0.08081869035959244,
"learning_rate": 8.827609011937066e-06,
"loss": 0.006,
"step": 3050
},
{
"epoch": 13.02127659574468,
"grad_norm": 0.06286204606294632,
"learning_rate": 8.465113516652424e-06,
"loss": 0.0057,
"step": 3060
},
{
"epoch": 13.063829787234043,
"grad_norm": 0.05016550421714783,
"learning_rate": 8.109888538051724e-06,
"loss": 0.0056,
"step": 3070
},
{
"epoch": 13.106382978723405,
"grad_norm": 0.04805108159780502,
"learning_rate": 7.761962291269408e-06,
"loss": 0.0056,
"step": 3080
},
{
"epoch": 13.148936170212766,
"grad_norm": 0.08138354867696762,
"learning_rate": 7.4213624117096755e-06,
"loss": 0.0057,
"step": 3090
},
{
"epoch": 13.191489361702128,
"grad_norm": 0.05341387912631035,
"learning_rate": 7.088115952851238e-06,
"loss": 0.0056,
"step": 3100
},
{
"epoch": 13.23404255319149,
"grad_norm": 0.06248362362384796,
"learning_rate": 6.762249384098662e-06,
"loss": 0.0059,
"step": 3110
},
{
"epoch": 13.27659574468085,
"grad_norm": 0.058334264904260635,
"learning_rate": 6.4437885886798224e-06,
"loss": 0.0057,
"step": 3120
},
{
"epoch": 13.319148936170214,
"grad_norm": 0.07496757060289383,
"learning_rate": 6.132758861590005e-06,
"loss": 0.0057,
"step": 3130
},
{
"epoch": 13.361702127659575,
"grad_norm": 0.04468085989356041,
"learning_rate": 5.829184907582896e-06,
"loss": 0.0058,
"step": 3140
},
{
"epoch": 13.404255319148936,
"grad_norm": 0.06133480742573738,
"learning_rate": 5.533090839208133e-06,
"loss": 0.0058,
"step": 3150
},
{
"epoch": 13.446808510638299,
"grad_norm": 0.056072745472192764,
"learning_rate": 5.244500174896205e-06,
"loss": 0.0057,
"step": 3160
},
{
"epoch": 13.48936170212766,
"grad_norm": 0.07432785630226135,
"learning_rate": 4.96343583709038e-06,
"loss": 0.0057,
"step": 3170
},
{
"epoch": 13.53191489361702,
"grad_norm": 0.051389969885349274,
"learning_rate": 4.68992015042592e-06,
"loss": 0.0056,
"step": 3180
},
{
"epoch": 13.574468085106384,
"grad_norm": 0.0631624385714531,
"learning_rate": 4.423974839956968e-06,
"loss": 0.0059,
"step": 3190
},
{
"epoch": 13.617021276595745,
"grad_norm": 0.0559026375412941,
"learning_rate": 4.165621029430855e-06,
"loss": 0.0057,
"step": 3200
},
{
"epoch": 13.659574468085106,
"grad_norm": 0.05802515894174576,
"learning_rate": 3.914879239610392e-06,
"loss": 0.0059,
"step": 3210
},
{
"epoch": 13.702127659574469,
"grad_norm": 0.06547726690769196,
"learning_rate": 3.671769386643742e-06,
"loss": 0.0058,
"step": 3220
},
{
"epoch": 13.74468085106383,
"grad_norm": 0.08117694407701492,
"learning_rate": 3.436310780482688e-06,
"loss": 0.0061,
"step": 3230
},
{
"epoch": 13.787234042553191,
"grad_norm": 0.059095997363328934,
"learning_rate": 3.2085221233487562e-06,
"loss": 0.0058,
"step": 3240
},
{
"epoch": 13.829787234042554,
"grad_norm": 0.05627988278865814,
"learning_rate": 2.9884215082477408e-06,
"loss": 0.0056,
"step": 3250
},
{
"epoch": 13.872340425531915,
"grad_norm": 0.04729423671960831,
"learning_rate": 2.776026417532629e-06,
"loss": 0.0058,
"step": 3260
},
{
"epoch": 13.914893617021276,
"grad_norm": 0.04625534638762474,
"learning_rate": 2.5713537215149132e-06,
"loss": 0.0056,
"step": 3270
},
{
"epoch": 13.957446808510639,
"grad_norm": 0.06575009226799011,
"learning_rate": 2.3744196771247173e-06,
"loss": 0.0057,
"step": 3280
},
{
"epoch": 14.0,
"grad_norm": 0.04723024740815163,
"learning_rate": 2.1852399266194314e-06,
"loss": 0.0057,
"step": 3290
},
{
"epoch": 14.042553191489361,
"grad_norm": 0.051644448190927505,
"learning_rate": 2.003829496341325e-06,
"loss": 0.0057,
"step": 3300
},
{
"epoch": 14.085106382978724,
"grad_norm": 0.05777151137590408,
"learning_rate": 1.8302027955239387e-06,
"loss": 0.0055,
"step": 3310
},
{
"epoch": 14.127659574468085,
"grad_norm": 0.06086675077676773,
"learning_rate": 1.6643736151477185e-06,
"loss": 0.0057,
"step": 3320
},
{
"epoch": 14.170212765957446,
"grad_norm": 0.05680638551712036,
"learning_rate": 1.5063551268444276e-06,
"loss": 0.0057,
"step": 3330
},
{
"epoch": 14.212765957446809,
"grad_norm": 0.05553250014781952,
"learning_rate": 1.3561598818511045e-06,
"loss": 0.0058,
"step": 3340
},
{
"epoch": 14.25531914893617,
"grad_norm": 0.05792253836989403,
"learning_rate": 1.21379981001305e-06,
"loss": 0.0057,
"step": 3350
},
{
"epoch": 14.297872340425531,
"grad_norm": 0.05049928277730942,
"learning_rate": 1.0792862188362396e-06,
"loss": 0.0059,
"step": 3360
},
{
"epoch": 14.340425531914894,
"grad_norm": 0.04483390226960182,
"learning_rate": 9.526297925892435e-07,
"loss": 0.0056,
"step": 3370
},
{
"epoch": 14.382978723404255,
"grad_norm": 0.052869729697704315,
"learning_rate": 8.338405914545045e-07,
"loss": 0.0058,
"step": 3380
},
{
"epoch": 14.425531914893616,
"grad_norm": 0.07982738316059113,
"learning_rate": 7.229280507293657e-07,
"loss": 0.006,
"step": 3390
},
{
"epoch": 14.46808510638298,
"grad_norm": 0.03856668993830681,
"learning_rate": 6.199009800765265e-07,
"loss": 0.0057,
"step": 3400
},
{
"epoch": 14.51063829787234,
"grad_norm": 0.07019790261983871,
"learning_rate": 5.24767562824402e-07,
"loss": 0.0057,
"step": 3410
},
{
"epoch": 14.553191489361701,
"grad_norm": 0.06207670271396637,
"learning_rate": 4.375353553170647e-07,
"loss": 0.0055,
"step": 3420
},
{
"epoch": 14.595744680851064,
"grad_norm": 0.07232890278100967,
"learning_rate": 3.5821128631408075e-07,
"loss": 0.0056,
"step": 3430
},
{
"epoch": 14.638297872340425,
"grad_norm": 0.05705394968390465,
"learning_rate": 2.86801656440161e-07,
"loss": 0.0059,
"step": 3440
},
{
"epoch": 14.680851063829786,
"grad_norm": 0.049011845141649246,
"learning_rate": 2.2331213768468363e-07,
"loss": 0.0057,
"step": 3450
},
{
"epoch": 14.72340425531915,
"grad_norm": 0.0473303347826004,
"learning_rate": 1.6774777295123223e-07,
"loss": 0.0056,
"step": 3460
},
{
"epoch": 14.76595744680851,
"grad_norm": 0.06919126957654953,
"learning_rate": 1.2011297565697188e-07,
"loss": 0.0056,
"step": 3470
},
{
"epoch": 14.808510638297872,
"grad_norm": 0.052880384027957916,
"learning_rate": 8.041152938216278e-08,
"loss": 0.0054,
"step": 3480
},
{
"epoch": 14.851063829787234,
"grad_norm": 0.08039422333240509,
"learning_rate": 4.8646587569578514e-08,
"loss": 0.0059,
"step": 3490
},
{
"epoch": 14.893617021276595,
"grad_norm": 0.060477934777736664,
"learning_rate": 2.482067327409521e-08,
"loss": 0.0056,
"step": 3500
},
{
"epoch": 14.936170212765958,
"grad_norm": 0.053076595067977905,
"learning_rate": 8.93567896219638e-09,
"loss": 0.0055,
"step": 3510
},
{
"epoch": 14.97872340425532,
"grad_norm": 0.0657854750752449,
"learning_rate": 9.9286636175977e-10,
"loss": 0.0057,
"step": 3520
}
],
"logging_steps": 10,
"max_steps": 3525,
"num_input_tokens_seen": 0,
"num_train_epochs": 15,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.417012575473664e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
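
For reference, below is a minimal sketch of how the log_history entries above can be read back and summarized with standard Python tooling. It assumes the file has been saved locally as trainer_state.json; that path, and the per-epoch summary it prints, are illustrative assumptions rather than part of this repository's tooling.

import json

# Load the trainer state dumped by the Hugging Face Trainer.
# The path is an assumption for illustration; adjust to where the file lives.
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Each entry in log_history records epoch, step, loss, learning_rate, and grad_norm.
history = state["log_history"]

# Keep the last logged entry for each integer epoch as a coarse summary.
last_seen = {}
for entry in history:
    last_seen[int(entry["epoch"])] = entry

for epoch, entry in sorted(last_seen.items()):
    print(
        f"epoch {epoch:2d}  step {entry['step']:4d}  "
        f"loss {entry['loss']:.4f}  lr {entry['learning_rate']:.2e}"
    )

Run against the file shown above, the output traces the loss falling from roughly 1.0 to the 0.005-0.006 range while the learning rate decays from about 2e-4 toward zero over the 3525 steps, consistent with a cosine-style schedule.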