{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1175,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 0.4270687699317932,
      "learning_rate": 0.00019999602855426865,
      "loss": 1.013,
      "step": 10
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 0.4152718782424927,
      "learning_rate": 0.00019998411453252217,
      "loss": 0.8289,
      "step": 20
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 0.7277560234069824,
      "learning_rate": 0.0001999642588810784,
      "loss": 0.5959,
      "step": 30
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 0.5505673885345459,
      "learning_rate": 0.00019993646317705016,
      "loss": 0.459,
      "step": 40
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 0.528052031993866,
      "learning_rate": 0.00019990072962822007,
      "loss": 0.3775,
      "step": 50
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 0.6307681202888489,
      "learning_rate": 0.00019985706107286514,
      "loss": 0.3285,
      "step": 60
    },
    {
      "epoch": 0.2978723404255319,
      "grad_norm": 0.6954013109207153,
      "learning_rate": 0.00019980546097953132,
      "loss": 0.2855,
      "step": 70
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 0.6790465116500854,
      "learning_rate": 0.000199745933446758,
      "loss": 0.2782,
      "step": 80
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 1.324937105178833,
      "learning_rate": 0.0001996784832027525,
      "loss": 0.2635,
      "step": 90
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.8779314756393433,
      "learning_rate": 0.00019960311560501454,
      "loss": 0.1861,
      "step": 100
    },
    {
      "epoch": 0.46808510638297873,
      "grad_norm": 0.693745493888855,
      "learning_rate": 0.00019951983663991056,
      "loss": 0.2001,
      "step": 110
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 1.0649502277374268,
      "learning_rate": 0.00019942865292219838,
      "loss": 0.1378,
      "step": 120
    },
    {
      "epoch": 0.5531914893617021,
      "grad_norm": 0.6962260007858276,
      "learning_rate": 0.0001993295716945017,
      "loss": 0.1579,
      "step": 130
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 0.7934479713439941,
      "learning_rate": 0.00019922260082673497,
      "loss": 0.092,
      "step": 140
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 1.1331907510757446,
      "learning_rate": 0.000199107748815478,
      "loss": 0.1208,
      "step": 150
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 1.3689247369766235,
      "learning_rate": 0.00019898502478330152,
      "loss": 0.0874,
      "step": 160
    },
    {
      "epoch": 0.723404255319149,
      "grad_norm": 0.5304535031318665,
      "learning_rate": 0.00019885443847804211,
      "loss": 0.0881,
      "step": 170
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.6805845499038696,
      "learning_rate": 0.0001987160002720283,
      "loss": 0.0584,
      "step": 180
    },
    {
      "epoch": 0.8085106382978723,
      "grad_norm": 0.2527499198913574,
      "learning_rate": 0.00019856972116125653,
      "loss": 0.08,
      "step": 190
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.799462616443634,
      "learning_rate": 0.0001984156127645178,
      "loss": 0.0556,
      "step": 200
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.936975359916687,
      "learning_rate": 0.0001982536873224748,
      "loss": 0.0945,
      "step": 210
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 0.8067993521690369,
      "learning_rate": 0.00019808395769668963,
      "loss": 0.0495,
      "step": 220
    },
    {
      "epoch": 0.9787234042553191,
      "grad_norm": 0.45767834782600403,
      "learning_rate": 0.00019790643736860227,
      "loss": 0.0617,
      "step": 230
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.9198794364929199,
      "learning_rate": 0.00019772114043845965,
      "loss": 0.0467,
      "step": 240
    },
    {
      "epoch": 1.0638297872340425,
      "grad_norm": 0.7327796816825867,
      "learning_rate": 0.0001975280816241959,
      "loss": 0.0391,
      "step": 250
    },
    {
      "epoch": 1.1063829787234043,
      "grad_norm": 0.8003076910972595,
      "learning_rate": 0.00019732727626026305,
      "loss": 0.0428,
      "step": 260
    },
    {
      "epoch": 1.148936170212766,
      "grad_norm": 0.10251367837190628,
      "learning_rate": 0.0001971187402964132,
      "loss": 0.032,
      "step": 270
    },
    {
      "epoch": 1.1914893617021276,
      "grad_norm": 0.45093855261802673,
      "learning_rate": 0.00019690249029643162,
      "loss": 0.0673,
      "step": 280
    },
    {
      "epoch": 1.2340425531914894,
      "grad_norm": 0.4845767915248871,
      "learning_rate": 0.0001966785434368211,
      "loss": 0.033,
      "step": 290
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.31195056438446045,
      "learning_rate": 0.00019644691750543767,
      "loss": 0.0261,
      "step": 300
    },
    {
      "epoch": 1.3191489361702127,
      "grad_norm": 0.14839951694011688,
      "learning_rate": 0.00019620763090007762,
      "loss": 0.0298,
      "step": 310
    },
    {
      "epoch": 1.3617021276595744,
      "grad_norm": 0.20573872327804565,
      "learning_rate": 0.00019596070262701626,
      "loss": 0.0155,
      "step": 320
    },
    {
      "epoch": 1.4042553191489362,
      "grad_norm": 0.47702595591545105,
      "learning_rate": 0.00019570615229949842,
      "loss": 0.0369,
      "step": 330
    },
    {
      "epoch": 1.4468085106382977,
      "grad_norm": 0.7073186039924622,
      "learning_rate": 0.00019544400013618023,
      "loss": 0.0302,
      "step": 340
    },
    {
      "epoch": 1.4893617021276595,
      "grad_norm": 0.1539478451013565,
      "learning_rate": 0.00019517426695952358,
      "loss": 0.0223,
      "step": 350
    },
    {
      "epoch": 1.5319148936170213,
      "grad_norm": 0.5202814340591431,
      "learning_rate": 0.00019489697419414182,
      "loss": 0.0263,
      "step": 360
    },
    {
      "epoch": 1.574468085106383,
      "grad_norm": 0.968192458152771,
      "learning_rate": 0.00019461214386509842,
      "loss": 0.044,
      "step": 370
    },
    {
      "epoch": 1.6170212765957448,
      "grad_norm": 0.5662522912025452,
      "learning_rate": 0.00019431979859615726,
      "loss": 0.0421,
      "step": 380
    },
    {
      "epoch": 1.6595744680851063,
      "grad_norm": 0.42925137281417847,
      "learning_rate": 0.00019401996160798573,
      "loss": 0.0606,
      "step": 390
    },
    {
      "epoch": 1.702127659574468,
      "grad_norm": 0.5803830027580261,
      "learning_rate": 0.00019371265671631037,
      "loss": 0.0392,
      "step": 400
    },
    {
      "epoch": 1.7446808510638299,
      "grad_norm": 0.4235450327396393,
      "learning_rate": 0.00019339790833002515,
      "loss": 0.0286,
      "step": 410
    },
    {
      "epoch": 1.7872340425531914,
      "grad_norm": 0.519207775592804,
      "learning_rate": 0.00019307574144925287,
      "loss": 0.0522,
      "step": 420
    },
    {
      "epoch": 1.8297872340425532,
      "grad_norm": 0.2344844490289688,
      "learning_rate": 0.00019274618166335912,
      "loss": 0.0281,
      "step": 430
    },
    {
      "epoch": 1.872340425531915,
      "grad_norm": 0.1990007758140564,
      "learning_rate": 0.00019240925514892,
      "loss": 0.0229,
      "step": 440
    },
    {
      "epoch": 1.9148936170212765,
      "grad_norm": 0.10929415374994278,
      "learning_rate": 0.00019206498866764288,
      "loss": 0.0258,
      "step": 450
    },
    {
      "epoch": 1.9574468085106385,
      "grad_norm": 0.4308103024959564,
      "learning_rate": 0.00019171340956424074,
      "loss": 0.0167,
      "step": 460
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.46525439620018005,
      "learning_rate": 0.0001913545457642601,
      "loss": 0.0283,
      "step": 470
    },
    {
      "epoch": 2.0425531914893615,
      "grad_norm": 0.28837406635284424,
      "learning_rate": 0.00019098842577186314,
      "loss": 0.0137,
      "step": 480
    },
    {
      "epoch": 2.0851063829787235,
      "grad_norm": 0.08533861488103867,
      "learning_rate": 0.00019061507866756347,
      "loss": 0.0182,
      "step": 490
    },
    {
      "epoch": 2.127659574468085,
      "grad_norm": 0.3499375581741333,
      "learning_rate": 0.00019023453410591635,
      "loss": 0.0221,
      "step": 500
    },
    {
      "epoch": 2.1702127659574466,
      "grad_norm": 0.07716694474220276,
      "learning_rate": 0.00018984682231316333,
      "loss": 0.0075,
      "step": 510
    },
    {
      "epoch": 2.2127659574468086,
      "grad_norm": 0.3093757927417755,
      "learning_rate": 0.00018945197408483123,
      "loss": 0.0133,
      "step": 520
    },
    {
      "epoch": 2.25531914893617,
      "grad_norm": 0.13492655754089355,
      "learning_rate": 0.00018905002078328632,
      "loss": 0.0184,
      "step": 530
    },
    {
      "epoch": 2.297872340425532,
      "grad_norm": 0.07833054661750793,
      "learning_rate": 0.000188640994335243,
      "loss": 0.0109,
      "step": 540
    },
    {
      "epoch": 2.3404255319148937,
      "grad_norm": 0.08865915983915329,
      "learning_rate": 0.0001882249272292282,
      "loss": 0.0121,
      "step": 550
    },
    {
      "epoch": 2.382978723404255,
      "grad_norm": 0.31314581632614136,
      "learning_rate": 0.00018780185251300046,
      "loss": 0.0242,
      "step": 560
    },
    {
      "epoch": 2.425531914893617,
      "grad_norm": 0.10387410968542099,
      "learning_rate": 0.00018737180379092537,
      "loss": 0.0285,
      "step": 570
    },
    {
      "epoch": 2.4680851063829787,
      "grad_norm": 0.11755700409412384,
      "learning_rate": 0.0001869348152213061,
      "loss": 0.0281,
      "step": 580
    },
    {
      "epoch": 2.5106382978723403,
      "grad_norm": 0.1438552439212799,
      "learning_rate": 0.0001864909215136705,
      "loss": 0.0216,
      "step": 590
    },
    {
      "epoch": 2.5531914893617023,
      "grad_norm": 0.08940370380878448,
      "learning_rate": 0.00018604015792601396,
      "loss": 0.0226,
      "step": 600
    },
    {
      "epoch": 2.595744680851064,
      "grad_norm": 0.19193390011787415,
      "learning_rate": 0.00018558256026199896,
      "loss": 0.0184,
      "step": 610
    },
    {
      "epoch": 2.6382978723404253,
      "grad_norm": 0.4586654603481293,
      "learning_rate": 0.00018511816486811134,
      "loss": 0.0154,
      "step": 620
    },
    {
      "epoch": 2.6808510638297873,
      "grad_norm": 0.05811255797743797,
      "learning_rate": 0.00018464700863077312,
      "loss": 0.0238,
      "step": 630
    },
    {
      "epoch": 2.723404255319149,
      "grad_norm": 0.16280680894851685,
      "learning_rate": 0.00018416912897341295,
      "loss": 0.0253,
      "step": 640
    },
    {
      "epoch": 2.7659574468085104,
      "grad_norm": 0.07031189650297165,
      "learning_rate": 0.00018368456385349334,
      "loss": 0.0146,
      "step": 650
    },
    {
      "epoch": 2.8085106382978724,
      "grad_norm": 0.07851342856884003,
      "learning_rate": 0.0001831933517594957,
      "loss": 0.0301,
      "step": 660
    },
    {
      "epoch": 2.851063829787234,
      "grad_norm": 0.11461298912763596,
      "learning_rate": 0.0001826955317078636,
      "loss": 0.0155,
      "step": 670
    },
    {
      "epoch": 2.8936170212765955,
      "grad_norm": 0.1839868575334549,
      "learning_rate": 0.00018219114323990345,
      "loss": 0.0099,
      "step": 680
    },
    {
      "epoch": 2.9361702127659575,
      "grad_norm": 0.04199373722076416,
      "learning_rate": 0.00018168022641864377,
      "loss": 0.0192,
      "step": 690
    },
    {
      "epoch": 2.978723404255319,
      "grad_norm": 0.3203773498535156,
      "learning_rate": 0.00018116282182565311,
      "loss": 0.0272,
      "step": 700
    },
    {
      "epoch": 3.021276595744681,
      "grad_norm": 0.327921599149704,
      "learning_rate": 0.0001806389705578168,
      "loss": 0.0081,
      "step": 710
    },
    {
      "epoch": 3.0638297872340425,
      "grad_norm": 0.29155433177948,
      "learning_rate": 0.00018010871422407236,
      "loss": 0.0128,
      "step": 720
    },
    {
      "epoch": 3.106382978723404,
      "grad_norm": 0.06926452368497849,
      "learning_rate": 0.00017957209494210493,
      "loss": 0.0171,
      "step": 730
    },
    {
      "epoch": 3.148936170212766,
      "grad_norm": 0.08231089264154434,
      "learning_rate": 0.0001790291553350016,
      "loss": 0.0098,
      "step": 740
    },
    {
      "epoch": 3.1914893617021276,
      "grad_norm": 0.14707215130329132,
      "learning_rate": 0.0001784799385278661,
      "loss": 0.0092,
      "step": 750
    },
    {
      "epoch": 3.2340425531914896,
      "grad_norm": 0.2735896706581116,
      "learning_rate": 0.00017792448814439333,
      "loss": 0.0115,
      "step": 760
    },
    {
      "epoch": 3.276595744680851,
      "grad_norm": 0.44960370659828186,
      "learning_rate": 0.00017736284830340436,
      "loss": 0.0195,
      "step": 770
    },
    {
      "epoch": 3.3191489361702127,
      "grad_norm": 0.13413724303245544,
      "learning_rate": 0.00017679506361534215,
      "loss": 0.0187,
      "step": 780
    },
    {
      "epoch": 3.3617021276595747,
      "grad_norm": 0.24698686599731445,
      "learning_rate": 0.00017622117917872823,
      "loss": 0.0125,
      "step": 790
    },
    {
      "epoch": 3.404255319148936,
      "grad_norm": 0.48694342374801636,
      "learning_rate": 0.00017564124057658056,
      "loss": 0.0234,
      "step": 800
    },
    {
      "epoch": 3.4468085106382977,
      "grad_norm": 0.6931429505348206,
      "learning_rate": 0.00017505529387279277,
      "loss": 0.0234,
      "step": 810
    },
    {
      "epoch": 3.4893617021276597,
      "grad_norm": 0.13700473308563232,
      "learning_rate": 0.00017446338560847568,
      "loss": 0.0145,
      "step": 820
    },
    {
      "epoch": 3.5319148936170213,
      "grad_norm": 0.3254775404930115,
      "learning_rate": 0.00017386556279826021,
      "loss": 0.0179,
      "step": 830
    },
    {
      "epoch": 3.574468085106383,
      "grad_norm": 0.368379682302475,
      "learning_rate": 0.00017326187292656333,
      "loss": 0.013,
      "step": 840
    },
    {
      "epoch": 3.617021276595745,
      "grad_norm": 0.2745888829231262,
      "learning_rate": 0.00017265236394381633,
      "loss": 0.0136,
      "step": 850
    },
    {
      "epoch": 3.6595744680851063,
      "grad_norm": 0.0781714916229248,
      "learning_rate": 0.00017203708426265614,
      "loss": 0.0126,
      "step": 860
    },
    {
      "epoch": 3.702127659574468,
      "grad_norm": 0.7518234848976135,
      "learning_rate": 0.00017141608275408006,
      "loss": 0.0134,
      "step": 870
    },
    {
      "epoch": 3.74468085106383,
      "grad_norm": 0.4271518886089325,
      "learning_rate": 0.00017078940874356392,
      "loss": 0.0127,
      "step": 880
    },
    {
      "epoch": 3.7872340425531914,
      "grad_norm": 0.34387120604515076,
      "learning_rate": 0.00017015711200714414,
      "loss": 0.0161,
      "step": 890
    },
    {
      "epoch": 3.829787234042553,
      "grad_norm": 0.09641717374324799,
      "learning_rate": 0.00016951924276746425,
      "loss": 0.0185,
      "step": 900
    },
    {
      "epoch": 3.872340425531915,
      "grad_norm": 0.44215765595436096,
      "learning_rate": 0.00016887585168978562,
      "loss": 0.0115,
      "step": 910
    },
    {
      "epoch": 3.9148936170212765,
      "grad_norm": 0.06954783201217651,
      "learning_rate": 0.0001682269898779632,
      "loss": 0.0121,
      "step": 920
    },
    {
      "epoch": 3.9574468085106385,
      "grad_norm": 0.21080243587493896,
      "learning_rate": 0.00016757270887038654,
      "loss": 0.0125,
      "step": 930
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.3382236063480377,
      "learning_rate": 0.00016691306063588583,
      "loss": 0.0109,
      "step": 940
    },
    {
      "epoch": 4.042553191489362,
      "grad_norm": 0.36123183369636536,
      "learning_rate": 0.00016624809756960444,
      "loss": 0.0093,
      "step": 950
    },
    {
      "epoch": 4.085106382978723,
      "grad_norm": 0.45228853821754456,
      "learning_rate": 0.00016557787248883696,
      "loss": 0.0117,
      "step": 960
    },
    {
      "epoch": 4.127659574468085,
      "grad_norm": 0.2724202275276184,
      "learning_rate": 0.00016490243862883413,
      "loss": 0.0126,
      "step": 970
    },
    {
      "epoch": 4.170212765957447,
      "grad_norm": 0.17904357612133026,
      "learning_rate": 0.00016422184963857432,
      "loss": 0.0103,
      "step": 980
    },
    {
      "epoch": 4.212765957446808,
      "grad_norm": 0.4267734885215759,
      "learning_rate": 0.00016353615957650236,
      "loss": 0.0105,
      "step": 990
    },
    {
      "epoch": 4.25531914893617,
      "grad_norm": 0.11816457659006119,
      "learning_rate": 0.00016284542290623567,
      "loss": 0.0097,
      "step": 1000
    },
    {
      "epoch": 4.297872340425532,
      "grad_norm": 0.04166145250201225,
      "learning_rate": 0.00016214969449223824,
      "loss": 0.0116,
      "step": 1010
    },
    {
      "epoch": 4.340425531914893,
      "grad_norm": 0.0687410831451416,
      "learning_rate": 0.00016144902959546286,
      "loss": 0.0088,
      "step": 1020
    },
    {
      "epoch": 4.382978723404255,
      "grad_norm": 0.37416237592697144,
      "learning_rate": 0.00016074348386896177,
      "loss": 0.019,
      "step": 1030
    },
    {
      "epoch": 4.425531914893617,
      "grad_norm": 0.06069188937544823,
      "learning_rate": 0.00016003311335346636,
      "loss": 0.0138,
      "step": 1040
    },
    {
      "epoch": 4.468085106382979,
      "grad_norm": 0.0817495658993721,
      "learning_rate": 0.00015931797447293552,
      "loss": 0.0084,
      "step": 1050
    },
    {
      "epoch": 4.51063829787234,
      "grad_norm": 0.09787007421255112,
      "learning_rate": 0.00015859812403007443,
      "loss": 0.009,
      "step": 1060
    },
    {
      "epoch": 4.553191489361702,
      "grad_norm": 0.06179153174161911,
      "learning_rate": 0.0001578736192018224,
      "loss": 0.008,
      "step": 1070
    },
    {
      "epoch": 4.595744680851064,
      "grad_norm": 0.3092339038848877,
      "learning_rate": 0.00015714451753481168,
      "loss": 0.0131,
      "step": 1080
    },
    {
      "epoch": 4.638297872340425,
      "grad_norm": 0.06405780464410782,
      "learning_rate": 0.0001564108769407962,
      "loss": 0.0122,
      "step": 1090
    },
    {
      "epoch": 4.680851063829787,
      "grad_norm": 0.21473410725593567,
      "learning_rate": 0.00015567275569205218,
      "loss": 0.0117,
      "step": 1100
    },
    {
      "epoch": 4.723404255319149,
      "grad_norm": 0.3080317974090576,
      "learning_rate": 0.00015493021241674918,
      "loss": 0.011,
      "step": 1110
    },
    {
      "epoch": 4.76595744680851,
      "grad_norm": 0.6501839756965637,
      "learning_rate": 0.0001541833060942937,
      "loss": 0.0124,
      "step": 1120
    },
    {
      "epoch": 4.808510638297872,
      "grad_norm": 0.04628886282444,
      "learning_rate": 0.00015343209605064422,
      "loss": 0.0082,
      "step": 1130
    },
    {
      "epoch": 4.851063829787234,
      "grad_norm": 0.059793177992105484,
      "learning_rate": 0.00015267664195359917,
      "loss": 0.0072,
      "step": 1140
    },
    {
      "epoch": 4.8936170212765955,
      "grad_norm": 0.052162788808345795,
      "learning_rate": 0.00015191700380805752,
      "loss": 0.0095,
      "step": 1150
    },
    {
      "epoch": 4.9361702127659575,
      "grad_norm": 0.06147542968392372,
      "learning_rate": 0.00015115324195125274,
      "loss": 0.0098,
      "step": 1160
    },
    {
      "epoch": 4.9787234042553195,
      "grad_norm": 0.6586833000183105,
      "learning_rate": 0.00015038541704796003,
      "loss": 0.0139,
      "step": 1170
    }
  ],
  "logging_steps": 10,
  "max_steps": 3525,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 15,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.72337525157888e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}