{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "eval_steps": 500,
  "global_step": 13500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.20531712472438812, |
|
"learning_rate": 0.001998518518518519, |
|
"loss": 2.6189, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.22602832317352295, |
|
"learning_rate": 0.001997037037037037, |
|
"loss": 2.5836, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.21022845804691315, |
|
"learning_rate": 0.0019955555555555555, |
|
"loss": 2.5439, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.17906232178211212, |
|
"learning_rate": 0.0019940740740740743, |
|
"loss": 2.6242, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.19077719748020172, |
|
"learning_rate": 0.0019925925925925927, |
|
"loss": 2.5929, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.25262829661369324, |
|
"learning_rate": 0.001991111111111111, |
|
"loss": 2.5714, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.32239940762519836, |
|
"learning_rate": 0.00198962962962963, |
|
"loss": 2.5695, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.17224617302417755, |
|
"learning_rate": 0.001988148148148148, |
|
"loss": 2.5651, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.17106947302818298, |
|
"learning_rate": 0.0019866666666666665, |
|
"loss": 2.6357, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.1687716841697693, |
|
"learning_rate": 0.0019851851851851853, |
|
"loss": 2.5754, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.17471371591091156, |
|
"learning_rate": 0.0019837037037037037, |
|
"loss": 2.5687, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.16484995186328888, |
|
"learning_rate": 0.0019822222222222225, |
|
"loss": 2.5992, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.16195523738861084, |
|
"learning_rate": 0.001980740740740741, |
|
"loss": 2.6108, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.1896306574344635, |
|
"learning_rate": 0.001979259259259259, |
|
"loss": 2.6148, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.1916884034872055, |
|
"learning_rate": 0.001977777777777778, |
|
"loss": 2.589, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.20058733224868774, |
|
"learning_rate": 0.0019762962962962963, |
|
"loss": 2.5799, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.16014626622200012, |
|
"learning_rate": 0.0019748148148148147, |
|
"loss": 2.5529, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.15680766105651855, |
|
"learning_rate": 0.0019733333333333334, |
|
"loss": 2.562, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.17019112408161163, |
|
"learning_rate": 0.001971851851851852, |
|
"loss": 2.579, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.16960741579532623, |
|
"learning_rate": 0.00197037037037037, |
|
"loss": 2.5542, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.17356687784194946, |
|
"learning_rate": 0.001968888888888889, |
|
"loss": 2.5641, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.17307817935943604, |
|
"learning_rate": 0.0019674074074074077, |
|
"loss": 2.5962, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.1674201637506485, |
|
"learning_rate": 0.001965925925925926, |
|
"loss": 2.5518, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.15180547535419464, |
|
"learning_rate": 0.0019644444444444444, |
|
"loss": 2.5919, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.166998952627182, |
|
"learning_rate": 0.0019629629629629632, |
|
"loss": 2.5684, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.16316364705562592, |
|
"learning_rate": 0.0019614814814814816, |
|
"loss": 2.603, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.2779152989387512, |
|
"learning_rate": 0.00196, |
|
"loss": 2.6232, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.17026250064373016, |
|
"learning_rate": 0.0019585185185185187, |
|
"loss": 2.4226, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.16963163018226624, |
|
"learning_rate": 0.001957037037037037, |
|
"loss": 2.4307, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 0.1799466907978058, |
|
"learning_rate": 0.0019555555555555554, |
|
"loss": 2.4621, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.17876045405864716, |
|
"learning_rate": 0.0019540740740740742, |
|
"loss": 2.4945, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.1827176958322525, |
|
"learning_rate": 0.0019525925925925928, |
|
"loss": 2.4547, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.20249758660793304, |
|
"learning_rate": 0.0019511111111111111, |
|
"loss": 2.4662, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.19642537832260132, |
|
"learning_rate": 0.0019496296296296297, |
|
"loss": 2.5019, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 0.2031635344028473, |
|
"learning_rate": 0.0019481481481481483, |
|
"loss": 2.4459, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.20372791588306427, |
|
"learning_rate": 0.0019466666666666669, |
|
"loss": 2.477, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"grad_norm": 0.20827481150627136, |
|
"learning_rate": 0.0019451851851851852, |
|
"loss": 2.5037, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.1974320113658905, |
|
"learning_rate": 0.0019437037037037038, |
|
"loss": 2.5155, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.2353196144104004, |
|
"learning_rate": 0.0019422222222222224, |
|
"loss": 2.5645, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 0.22184617817401886, |
|
"learning_rate": 0.0019407407407407407, |
|
"loss": 2.4933, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 0.20222437381744385, |
|
"learning_rate": 0.0019392592592592593, |
|
"loss": 2.4861, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 0.19691778719425201, |
|
"learning_rate": 0.0019377777777777778, |
|
"loss": 2.4869, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 0.19557970762252808, |
|
"learning_rate": 0.0019362962962962964, |
|
"loss": 2.4886, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.18064908683300018, |
|
"learning_rate": 0.0019348148148148148, |
|
"loss": 2.5016, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 0.20412351191043854, |
|
"learning_rate": 0.0019333333333333333, |
|
"loss": 2.5193, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 0.19166867434978485, |
|
"learning_rate": 0.001931851851851852, |
|
"loss": 2.5526, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.18850873410701752, |
|
"learning_rate": 0.0019303703703703703, |
|
"loss": 2.5297, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 0.1931956559419632, |
|
"learning_rate": 0.0019288888888888888, |
|
"loss": 2.5621, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.20721589028835297, |
|
"learning_rate": 0.0019274074074074074, |
|
"loss": 2.5265, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"grad_norm": 0.17977182567119598, |
|
"learning_rate": 0.0019259259259259258, |
|
"loss": 2.4947, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 0.19090428948402405, |
|
"learning_rate": 0.0019244444444444443, |
|
"loss": 2.5391, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.1727808117866516, |
|
"learning_rate": 0.0019229629629629631, |
|
"loss": 2.5453, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"grad_norm": 0.20896504819393158, |
|
"learning_rate": 0.0019214814814814817, |
|
"loss": 2.5575, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.3333798050880432, |
|
"learning_rate": 0.00192, |
|
"loss": 2.506, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.2123243808746338, |
|
"learning_rate": 0.0019185185185185186, |
|
"loss": 2.3596, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.20756013691425323, |
|
"learning_rate": 0.0019170370370370372, |
|
"loss": 2.3598, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.25747150182724, |
|
"learning_rate": 0.0019155555555555555, |
|
"loss": 2.3519, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.21804387867450714, |
|
"learning_rate": 0.0019140740740740741, |
|
"loss": 2.3626, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.21577107906341553, |
|
"learning_rate": 0.0019125925925925927, |
|
"loss": 2.3083, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.2492288202047348, |
|
"learning_rate": 0.0019111111111111113, |
|
"loss": 2.3581, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.26941046118736267, |
|
"learning_rate": 0.0019096296296296296, |
|
"loss": 2.3776, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.21193154156208038, |
|
"learning_rate": 0.0019081481481481482, |
|
"loss": 2.4152, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.2306254655122757, |
|
"learning_rate": 0.0019066666666666668, |
|
"loss": 2.3869, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.2227521389722824, |
|
"learning_rate": 0.001905185185185185, |
|
"loss": 2.3928, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.2545499801635742, |
|
"learning_rate": 0.0019037037037037037, |
|
"loss": 2.3672, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"grad_norm": 0.24774783849716187, |
|
"learning_rate": 0.0019022222222222222, |
|
"loss": 2.3561, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.22088710963726044, |
|
"learning_rate": 0.0019007407407407408, |
|
"loss": 2.4168, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.2693859338760376, |
|
"learning_rate": 0.0018992592592592592, |
|
"loss": 2.4042, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"grad_norm": 0.22340671718120575, |
|
"learning_rate": 0.0018977777777777777, |
|
"loss": 2.4334, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 0.24626752734184265, |
|
"learning_rate": 0.0018962962962962963, |
|
"loss": 2.4132, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.22658760845661163, |
|
"learning_rate": 0.0018948148148148147, |
|
"loss": 2.4186, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.22482579946517944, |
|
"learning_rate": 0.0018933333333333335, |
|
"loss": 2.4567, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.2270815223455429, |
|
"learning_rate": 0.001891851851851852, |
|
"loss": 2.4324, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.2389022558927536, |
|
"learning_rate": 0.0018903703703703706, |
|
"loss": 2.4357, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.2350894808769226, |
|
"learning_rate": 0.001888888888888889, |
|
"loss": 2.4331, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.21408489346504211, |
|
"learning_rate": 0.0018874074074074075, |
|
"loss": 2.4596, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.21101674437522888, |
|
"learning_rate": 0.001885925925925926, |
|
"loss": 2.4607, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.22046928107738495, |
|
"learning_rate": 0.0018844444444444444, |
|
"loss": 2.4497, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.20029981434345245, |
|
"learning_rate": 0.001882962962962963, |
|
"loss": 2.4479, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.20895542204380035, |
|
"learning_rate": 0.0018814814814814816, |
|
"loss": 2.4687, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.5576801300048828, |
|
"learning_rate": 0.00188, |
|
"loss": 2.4626, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"grad_norm": 0.2836795449256897, |
|
"learning_rate": 0.0018785185185185185, |
|
"loss": 2.2582, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 0.253925085067749, |
|
"learning_rate": 0.001877037037037037, |
|
"loss": 2.2339, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 0.26034608483314514, |
|
"learning_rate": 0.0018755555555555557, |
|
"loss": 2.2487, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"grad_norm": 0.255710244178772, |
|
"learning_rate": 0.001874074074074074, |
|
"loss": 2.2417, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 0.3075387477874756, |
|
"learning_rate": 0.0018725925925925926, |
|
"loss": 2.2657, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 0.2657618224620819, |
|
"learning_rate": 0.0018711111111111112, |
|
"loss": 2.3137, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 0.2990604639053345, |
|
"learning_rate": 0.0018696296296296295, |
|
"loss": 2.2845, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 0.24910998344421387, |
|
"learning_rate": 0.001868148148148148, |
|
"loss": 2.2986, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 0.27236661314964294, |
|
"learning_rate": 0.0018666666666666666, |
|
"loss": 2.3025, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 0.27428188920021057, |
|
"learning_rate": 0.0018651851851851852, |
|
"loss": 2.2739, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 0.26426592469215393, |
|
"learning_rate": 0.0018637037037037036, |
|
"loss": 2.3701, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 0.24107518792152405, |
|
"learning_rate": 0.0018622222222222224, |
|
"loss": 2.3512, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 0.26364269852638245, |
|
"learning_rate": 0.001860740740740741, |
|
"loss": 2.3045, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.3026023209095001, |
|
"learning_rate": 0.0018592592592592593, |
|
"loss": 2.3437, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 0.2444707602262497, |
|
"learning_rate": 0.0018577777777777779, |
|
"loss": 2.3434, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 0.2680145502090454, |
|
"learning_rate": 0.0018562962962962964, |
|
"loss": 2.3478, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"grad_norm": 0.25497835874557495, |
|
"learning_rate": 0.001854814814814815, |
|
"loss": 2.3419, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 0.2542170584201813, |
|
"learning_rate": 0.0018533333333333334, |
|
"loss": 2.3547, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 0.2502199411392212, |
|
"learning_rate": 0.001851851851851852, |
|
"loss": 2.3492, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 0.23490004241466522, |
|
"learning_rate": 0.0018503703703703705, |
|
"loss": 2.3451, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 0.2701961100101471, |
|
"learning_rate": 0.0018488888888888888, |
|
"loss": 2.3778, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 0.27240490913391113, |
|
"learning_rate": 0.0018474074074074074, |
|
"loss": 2.3627, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 0.2597067952156067, |
|
"learning_rate": 0.001845925925925926, |
|
"loss": 2.3849, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 0.24304194748401642, |
|
"learning_rate": 0.0018444444444444446, |
|
"loss": 2.3623, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 0.24453286826610565, |
|
"learning_rate": 0.001842962962962963, |
|
"loss": 2.3752, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 0.24190236628055573, |
|
"learning_rate": 0.0018414814814814815, |
|
"loss": 2.4014, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.5276634693145752, |
|
"learning_rate": 0.00184, |
|
"loss": 2.3874, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"grad_norm": 0.26895132660865784, |
|
"learning_rate": 0.0018385185185185184, |
|
"loss": 2.1745, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"grad_norm": 0.2788925766944885, |
|
"learning_rate": 0.001837037037037037, |
|
"loss": 2.142, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"grad_norm": 0.28623485565185547, |
|
"learning_rate": 0.0018355555555555556, |
|
"loss": 2.166, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"grad_norm": 0.2977291941642761, |
|
"learning_rate": 0.001834074074074074, |
|
"loss": 2.1839, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"grad_norm": 0.30595630407333374, |
|
"learning_rate": 0.0018325925925925927, |
|
"loss": 2.2183, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"grad_norm": 0.30192676186561584, |
|
"learning_rate": 0.0018311111111111113, |
|
"loss": 2.1944, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"grad_norm": 0.29342707991600037, |
|
"learning_rate": 0.0018296296296296298, |
|
"loss": 2.2278, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"grad_norm": 0.28939688205718994, |
|
"learning_rate": 0.0018281481481481482, |
|
"loss": 2.2185, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"grad_norm": 0.30929163098335266, |
|
"learning_rate": 0.0018266666666666668, |
|
"loss": 2.2323, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"grad_norm": 0.2740519046783447, |
|
"learning_rate": 0.0018251851851851853, |
|
"loss": 2.2288, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"grad_norm": 0.27721187472343445, |
|
"learning_rate": 0.0018237037037037037, |
|
"loss": 2.2596, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"grad_norm": 0.28971293568611145, |
|
"learning_rate": 0.0018222222222222223, |
|
"loss": 2.255, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"grad_norm": 0.27458974719047546, |
|
"learning_rate": 0.0018207407407407408, |
|
"loss": 2.2531, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"grad_norm": 0.2680809795856476, |
|
"learning_rate": 0.0018192592592592594, |
|
"loss": 2.2601, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"grad_norm": 0.27959805727005005, |
|
"learning_rate": 0.0018177777777777778, |
|
"loss": 2.3105, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"grad_norm": 0.2920687794685364, |
|
"learning_rate": 0.0018162962962962963, |
|
"loss": 2.2636, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"grad_norm": 0.2851353585720062, |
|
"learning_rate": 0.001814814814814815, |
|
"loss": 2.2839, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"grad_norm": 0.2648279368877411, |
|
"learning_rate": 0.0018133333333333332, |
|
"loss": 2.2682, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"grad_norm": 0.29693925380706787, |
|
"learning_rate": 0.0018118518518518518, |
|
"loss": 2.3228, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"grad_norm": 0.2693846523761749, |
|
"learning_rate": 0.0018103703703703704, |
|
"loss": 2.2764, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"grad_norm": 0.29114627838134766, |
|
"learning_rate": 0.001808888888888889, |
|
"loss": 2.2741, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"grad_norm": 0.3028624951839447, |
|
"learning_rate": 0.0018074074074074073, |
|
"loss": 2.3024, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"grad_norm": 0.2751203775405884, |
|
"learning_rate": 0.0018059259259259259, |
|
"loss": 2.3401, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"grad_norm": 0.2969362437725067, |
|
"learning_rate": 0.0018044444444444445, |
|
"loss": 2.3181, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"grad_norm": 0.2908598780632019, |
|
"learning_rate": 0.0018029629629629628, |
|
"loss": 2.3443, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"grad_norm": 0.29255783557891846, |
|
"learning_rate": 0.0018014814814814816, |
|
"loss": 2.3673, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 0.54734867811203, |
|
"learning_rate": 0.0018000000000000002, |
|
"loss": 2.3092, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"grad_norm": 0.3392738997936249, |
|
"learning_rate": 0.0017985185185185187, |
|
"loss": 2.053, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"grad_norm": 0.3059435784816742, |
|
"learning_rate": 0.001797037037037037, |
|
"loss": 2.0656, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"grad_norm": 0.3064379394054413, |
|
"learning_rate": 0.0017955555555555557, |
|
"loss": 2.1344, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"grad_norm": 0.3037875294685364, |
|
"learning_rate": 0.0017940740740740742, |
|
"loss": 2.1175, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"grad_norm": 0.3414519727230072, |
|
"learning_rate": 0.0017925925925925926, |
|
"loss": 2.1025, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"grad_norm": 0.31483858823776245, |
|
"learning_rate": 0.0017911111111111112, |
|
"loss": 2.1406, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"grad_norm": 0.3169196546077728, |
|
"learning_rate": 0.0017896296296296297, |
|
"loss": 2.1202, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"grad_norm": 0.32005852460861206, |
|
"learning_rate": 0.001788148148148148, |
|
"loss": 2.165, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"grad_norm": 0.32935044169425964, |
|
"learning_rate": 0.0017866666666666667, |
|
"loss": 2.19, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"grad_norm": 0.3298720419406891, |
|
"learning_rate": 0.0017851851851851852, |
|
"loss": 2.2113, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"grad_norm": 0.3138920068740845, |
|
"learning_rate": 0.0017837037037037038, |
|
"loss": 2.1645, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"grad_norm": 0.31499677896499634, |
|
"learning_rate": 0.0017822222222222222, |
|
"loss": 2.1918, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"grad_norm": 0.31354495882987976, |
|
"learning_rate": 0.0017807407407407407, |
|
"loss": 2.2005, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"grad_norm": 0.2973974943161011, |
|
"learning_rate": 0.0017792592592592593, |
|
"loss": 2.2388, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"grad_norm": 0.29360270500183105, |
|
"learning_rate": 0.0017777777777777776, |
|
"loss": 2.2541, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"grad_norm": 0.29777848720550537, |
|
"learning_rate": 0.0017762962962962962, |
|
"loss": 2.2157, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"grad_norm": 0.3189815878868103, |
|
"learning_rate": 0.0017748148148148148, |
|
"loss": 2.2672, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"grad_norm": 0.30607324838638306, |
|
"learning_rate": 0.0017733333333333334, |
|
"loss": 2.2685, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"grad_norm": 0.3085130751132965, |
|
"learning_rate": 0.001771851851851852, |
|
"loss": 2.2189, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"grad_norm": 0.3023441433906555, |
|
"learning_rate": 0.0017703703703703705, |
|
"loss": 2.2097, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"grad_norm": 0.29119688272476196, |
|
"learning_rate": 0.001768888888888889, |
|
"loss": 2.225, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"grad_norm": 0.2987782657146454, |
|
"learning_rate": 0.0017674074074074074, |
|
"loss": 2.282, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"grad_norm": 0.31132954359054565, |
|
"learning_rate": 0.001765925925925926, |
|
"loss": 2.2924, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"grad_norm": 0.29663658142089844, |
|
"learning_rate": 0.0017644444444444446, |
|
"loss": 2.2517, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"grad_norm": 0.28728464245796204, |
|
"learning_rate": 0.0017629629629629631, |
|
"loss": 2.2853, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"grad_norm": 0.29688742756843567, |
|
"learning_rate": 0.0017614814814814815, |
|
"loss": 2.2909, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"grad_norm": 0.765344500541687, |
|
"learning_rate": 0.00176, |
|
"loss": 2.2834, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"grad_norm": 0.3417266309261322, |
|
"learning_rate": 0.0017585185185185186, |
|
"loss": 2.0025, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"grad_norm": 0.34385794401168823, |
|
"learning_rate": 0.001757037037037037, |
|
"loss": 1.9865, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"grad_norm": 0.31611743569374084, |
|
"learning_rate": 0.0017555555555555556, |
|
"loss": 2.0147, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"grad_norm": 0.33877480030059814, |
|
"learning_rate": 0.0017540740740740741, |
|
"loss": 2.0638, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"grad_norm": 0.33535125851631165, |
|
"learning_rate": 0.0017525925925925927, |
|
"loss": 2.0912, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"grad_norm": 0.34495416283607483, |
|
"learning_rate": 0.001751111111111111, |
|
"loss": 2.1032, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"grad_norm": 0.34369274973869324, |
|
"learning_rate": 0.0017496296296296296, |
|
"loss": 2.1167, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"grad_norm": 0.34757956862449646, |
|
"learning_rate": 0.0017481481481481482, |
|
"loss": 2.1166, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"grad_norm": 0.35368654131889343, |
|
"learning_rate": 0.0017466666666666665, |
|
"loss": 2.1397, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"grad_norm": 0.341086745262146, |
|
"learning_rate": 0.0017451851851851851, |
|
"loss": 2.1334, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"grad_norm": 0.36962762475013733, |
|
"learning_rate": 0.0017437037037037037, |
|
"loss": 2.141, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"grad_norm": 0.3078432083129883, |
|
"learning_rate": 0.001742222222222222, |
|
"loss": 2.1514, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 0.34287720918655396, |
|
"learning_rate": 0.0017407407407407408, |
|
"loss": 2.1918, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"grad_norm": 0.315064936876297, |
|
"learning_rate": 0.0017392592592592594, |
|
"loss": 2.1685, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"grad_norm": 0.31131112575531006, |
|
"learning_rate": 0.001737777777777778, |
|
"loss": 2.1827, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"grad_norm": 0.3834725022315979, |
|
"learning_rate": 0.0017362962962962963, |
|
"loss": 2.1857, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"grad_norm": 0.34319067001342773, |
|
"learning_rate": 0.001734814814814815, |
|
"loss": 2.2084, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"grad_norm": 0.29598793387413025, |
|
"learning_rate": 0.0017333333333333335, |
|
"loss": 2.1508, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"grad_norm": 0.3391283452510834, |
|
"learning_rate": 0.0017318518518518518, |
|
"loss": 2.1681, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"grad_norm": 0.3115655183792114, |
|
"learning_rate": 0.0017303703703703704, |
|
"loss": 2.2049, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"grad_norm": 0.3322307765483856, |
|
"learning_rate": 0.001728888888888889, |
|
"loss": 2.2095, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"grad_norm": 0.32754597067832947, |
|
"learning_rate": 0.0017274074074074075, |
|
"loss": 2.2035, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"grad_norm": 0.31123578548431396, |
|
"learning_rate": 0.0017259259259259259, |
|
"loss": 2.2203, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"grad_norm": 0.32000821828842163, |
|
"learning_rate": 0.0017244444444444445, |
|
"loss": 2.1779, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"grad_norm": 0.3137577474117279, |
|
"learning_rate": 0.001722962962962963, |
|
"loss": 2.2027, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 0.31178486347198486, |
|
"learning_rate": 0.0017214814814814814, |
|
"loss": 2.2265, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"grad_norm": 0.5970402956008911, |
|
"learning_rate": 0.00172, |
|
"loss": 2.184, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 0.351868212223053, |
|
"learning_rate": 0.0017185185185185185, |
|
"loss": 1.9941, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"grad_norm": 0.37474524974823, |
|
"learning_rate": 0.001717037037037037, |
|
"loss": 1.9817, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"grad_norm": 0.40310508012771606, |
|
"learning_rate": 0.0017155555555555555, |
|
"loss": 2.0109, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"grad_norm": 0.40088704228401184, |
|
"learning_rate": 0.001714074074074074, |
|
"loss": 2.0127, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"grad_norm": 0.3924960196018219, |
|
"learning_rate": 0.0017125925925925926, |
|
"loss": 2.012, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"grad_norm": 0.3628038167953491, |
|
"learning_rate": 0.0017111111111111112, |
|
"loss": 2.016, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"grad_norm": 0.34143421053886414, |
|
"learning_rate": 0.0017096296296296297, |
|
"loss": 2.0582, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"grad_norm": 0.3582156002521515, |
|
"learning_rate": 0.0017081481481481483, |
|
"loss": 2.0677, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"grad_norm": 0.3526308536529541, |
|
"learning_rate": 0.0017066666666666669, |
|
"loss": 2.0938, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"grad_norm": 0.3765566051006317, |
|
"learning_rate": 0.0017051851851851852, |
|
"loss": 2.0936, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"grad_norm": 0.35167813301086426, |
|
"learning_rate": 0.0017037037037037038, |
|
"loss": 2.1124, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 0.3823408782482147, |
|
"learning_rate": 0.0017022222222222224, |
|
"loss": 2.0656, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"grad_norm": 0.33811891078948975, |
|
"learning_rate": 0.0017007407407407407, |
|
"loss": 2.1003, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 0.36594629287719727, |
|
"learning_rate": 0.0016992592592592593, |
|
"loss": 2.1514, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"grad_norm": 0.3879795968532562, |
|
"learning_rate": 0.0016977777777777779, |
|
"loss": 2.1274, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"grad_norm": 0.3433550298213959, |
|
"learning_rate": 0.0016962962962962962, |
|
"loss": 2.102, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"grad_norm": 0.34211045503616333, |
|
"learning_rate": 0.0016948148148148148, |
|
"loss": 2.0929, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"grad_norm": 0.3362341821193695, |
|
"learning_rate": 0.0016933333333333334, |
|
"loss": 2.1426, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"grad_norm": 0.3737432658672333, |
|
"learning_rate": 0.001691851851851852, |
|
"loss": 2.1574, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"grad_norm": 0.33443060517311096, |
|
"learning_rate": 0.0016903703703703703, |
|
"loss": 2.1441, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"grad_norm": 0.3313807547092438, |
|
"learning_rate": 0.0016888888888888889, |
|
"loss": 2.1376, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"grad_norm": 0.3413750231266022, |
|
"learning_rate": 0.0016874074074074074, |
|
"loss": 2.1227, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"grad_norm": 0.356935054063797, |
|
"learning_rate": 0.0016859259259259258, |
|
"loss": 2.1511, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"grad_norm": 0.30882149934768677, |
|
"learning_rate": 0.0016844444444444444, |
|
"loss": 2.1363, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"grad_norm": 0.3477705121040344, |
|
"learning_rate": 0.001682962962962963, |
|
"loss": 2.1855, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"grad_norm": 0.32417911291122437, |
|
"learning_rate": 0.0016814814814814817, |
|
"loss": 2.169, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 0.6133363842964172, |
|
"learning_rate": 0.00168, |
|
"loss": 2.1631, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"grad_norm": 0.3599429130554199, |
|
"learning_rate": 0.0016785185185185186, |
|
"loss": 1.8783, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"grad_norm": 0.40336427092552185, |
|
"learning_rate": 0.0016770370370370372, |
|
"loss": 1.9111, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"grad_norm": 0.38553932309150696, |
|
"learning_rate": 0.0016755555555555556, |
|
"loss": 1.9616, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"grad_norm": 0.41913771629333496, |
|
"learning_rate": 0.0016740740740740741, |
|
"loss": 1.9623, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"grad_norm": 0.38957417011260986, |
|
"learning_rate": 0.0016725925925925927, |
|
"loss": 1.9991, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"grad_norm": 0.35183995962142944, |
|
"learning_rate": 0.0016711111111111113, |
|
"loss": 1.9689, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"grad_norm": 0.3833864629268646, |
|
"learning_rate": 0.0016696296296296296, |
|
"loss": 1.9944, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"grad_norm": 0.38431793451309204, |
|
"learning_rate": 0.0016681481481481482, |
|
"loss": 2.0343, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"grad_norm": 0.35817861557006836, |
|
"learning_rate": 0.0016666666666666668, |
|
"loss": 2.0205, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"grad_norm": 0.35562562942504883, |
|
"learning_rate": 0.0016651851851851851, |
|
"loss": 2.0322, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"grad_norm": 0.40019509196281433, |
|
"learning_rate": 0.0016637037037037037, |
|
"loss": 2.0804, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"grad_norm": 0.3636208772659302, |
|
"learning_rate": 0.0016622222222222223, |
|
"loss": 2.068, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"grad_norm": 0.4120703637599945, |
|
"learning_rate": 0.0016607407407407408, |
|
"loss": 2.0357, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"grad_norm": 0.3651635944843292, |
|
"learning_rate": 0.0016592592592592592, |
|
"loss": 2.0502, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"grad_norm": 0.36248978972435, |
|
"learning_rate": 0.0016577777777777778, |
|
"loss": 2.0824, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"grad_norm": 0.36948567628860474, |
|
"learning_rate": 0.0016562962962962963, |
|
"loss": 2.094, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"grad_norm": 0.3564412295818329, |
|
"learning_rate": 0.0016548148148148147, |
|
"loss": 2.1282, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"grad_norm": 0.3488951623439789, |
|
"learning_rate": 0.0016533333333333333, |
|
"loss": 2.081, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"grad_norm": 0.385596364736557, |
|
"learning_rate": 0.0016518518518518518, |
|
"loss": 2.1045, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"grad_norm": 0.35894304513931274, |
|
"learning_rate": 0.0016503703703703704, |
|
"loss": 2.1105, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"grad_norm": 0.34181079268455505, |
|
"learning_rate": 0.001648888888888889, |
|
"loss": 2.1062, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 8.81, |
|
"grad_norm": 0.3692533075809479, |
|
"learning_rate": 0.0016474074074074075, |
|
"loss": 2.1161, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"grad_norm": 0.37401172518730164, |
|
"learning_rate": 0.0016459259259259261, |
|
"loss": 2.1134, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"grad_norm": 0.38438746333122253, |
|
"learning_rate": 0.0016444444444444445, |
|
"loss": 2.1237, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"grad_norm": 0.35017186403274536, |
|
"learning_rate": 0.001642962962962963, |
|
"loss": 2.1042, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"grad_norm": 0.3776445984840393, |
|
"learning_rate": 0.0016414814814814816, |
|
"loss": 2.1506, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"grad_norm": 0.6395183205604553, |
|
"learning_rate": 0.00164, |
|
"loss": 2.1215, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 9.04, |
|
"grad_norm": 0.4051409661769867, |
|
"learning_rate": 0.0016385185185185185, |
|
"loss": 1.8761, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"grad_norm": 0.41390544176101685, |
|
"learning_rate": 0.001637037037037037, |
|
"loss": 1.8756, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"grad_norm": 0.4349888265132904, |
|
"learning_rate": 0.0016355555555555557, |
|
"loss": 1.8841, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"grad_norm": 0.4476017355918884, |
|
"learning_rate": 0.001634074074074074, |
|
"loss": 1.9162, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"grad_norm": 0.37201741337776184, |
|
"learning_rate": 0.0016325925925925926, |
|
"loss": 1.9246, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"grad_norm": 0.3889928162097931, |
|
"learning_rate": 0.0016311111111111112, |
|
"loss": 1.9684, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"grad_norm": 0.43831804394721985, |
|
"learning_rate": 0.0016296296296296295, |
|
"loss": 1.9903, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"grad_norm": 0.397524356842041, |
|
"learning_rate": 0.001628148148148148, |
|
"loss": 1.9662, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"grad_norm": 0.40506064891815186, |
|
"learning_rate": 0.0016266666666666667, |
|
"loss": 1.9938, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"grad_norm": 0.42983636260032654, |
|
"learning_rate": 0.0016251851851851852, |
|
"loss": 1.9802, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"grad_norm": 0.39103442430496216, |
|
"learning_rate": 0.0016237037037037036, |
|
"loss": 1.9937, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"grad_norm": 0.4009477496147156, |
|
"learning_rate": 0.0016222222222222222, |
|
"loss": 2.0031, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"grad_norm": 0.36088138818740845, |
|
"learning_rate": 0.001620740740740741, |
|
"loss": 2.0663, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"grad_norm": 0.3654880225658417, |
|
"learning_rate": 0.0016192592592592593, |
|
"loss": 2.0053, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"grad_norm": 0.35059937834739685, |
|
"learning_rate": 0.0016177777777777779, |
|
"loss": 2.0549, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"grad_norm": 0.37335512042045593, |
|
"learning_rate": 0.0016162962962962964, |
|
"loss": 2.0331, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"grad_norm": 0.36852195858955383, |
|
"learning_rate": 0.001614814814814815, |
|
"loss": 2.042, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"grad_norm": 0.3899456262588501, |
|
"learning_rate": 0.0016133333333333334, |
|
"loss": 2.0552, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"grad_norm": 0.390413373708725, |
|
"learning_rate": 0.001611851851851852, |
|
"loss": 2.0535, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"grad_norm": 0.3956015110015869, |
|
"learning_rate": 0.0016103703703703705, |
|
"loss": 2.0662, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"grad_norm": 0.39609503746032715, |
|
"learning_rate": 0.0016088888888888889, |
|
"loss": 2.0453, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"grad_norm": 0.38103851675987244, |
|
"learning_rate": 0.0016074074074074074, |
|
"loss": 2.068, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"grad_norm": 0.40990570187568665, |
|
"learning_rate": 0.001605925925925926, |
|
"loss": 2.0881, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"grad_norm": 0.37008753418922424, |
|
"learning_rate": 0.0016044444444444444, |
|
"loss": 2.0609, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"grad_norm": 0.4039746820926666, |
|
"learning_rate": 0.001602962962962963, |
|
"loss": 2.0809, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"grad_norm": 0.38472408056259155, |
|
"learning_rate": 0.0016014814814814815, |
|
"loss": 2.0827, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.7517541646957397, |
|
"learning_rate": 0.0016, |
|
"loss": 2.058, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 10.04, |
|
"grad_norm": 0.4214729368686676, |
|
"learning_rate": 0.0015985185185185184, |
|
"loss": 1.8799, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 10.07, |
|
"grad_norm": 0.41175585985183716, |
|
"learning_rate": 0.001597037037037037, |
|
"loss": 1.8452, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 10.11, |
|
"grad_norm": 0.4495772421360016, |
|
"learning_rate": 0.0015955555555555556, |
|
"loss": 1.8456, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"grad_norm": 0.4272764325141907, |
|
"learning_rate": 0.001594074074074074, |
|
"loss": 1.8606, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 10.19, |
|
"grad_norm": 0.422076940536499, |
|
"learning_rate": 0.0015925925925925925, |
|
"loss": 1.8831, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 10.22, |
|
"grad_norm": 0.4146776795387268, |
|
"learning_rate": 0.001591111111111111, |
|
"loss": 1.9224, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"grad_norm": 0.4350244998931885, |
|
"learning_rate": 0.0015896296296296299, |
|
"loss": 1.9407, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"grad_norm": 0.40515050292015076, |
|
"learning_rate": 0.0015881481481481482, |
|
"loss": 1.8997, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"grad_norm": 0.4228098690509796, |
|
"learning_rate": 0.0015866666666666668, |
|
"loss": 1.9657, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 10.37, |
|
"grad_norm": 0.46530553698539734, |
|
"learning_rate": 0.0015851851851851854, |
|
"loss": 1.9677, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 10.41, |
|
"grad_norm": 0.4347987174987793, |
|
"learning_rate": 0.0015837037037037037, |
|
"loss": 1.9443, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 10.44, |
|
"grad_norm": 0.4162721633911133, |
|
"learning_rate": 0.0015822222222222223, |
|
"loss": 1.9851, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"grad_norm": 0.4888492822647095, |
|
"learning_rate": 0.0015807407407407408, |
|
"loss": 1.9795, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"grad_norm": 0.44969916343688965, |
|
"learning_rate": 0.0015792592592592594, |
|
"loss": 1.9934, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 10.56, |
|
"grad_norm": 0.38131090998649597, |
|
"learning_rate": 0.0015777777777777778, |
|
"loss": 1.9955, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 10.59, |
|
"grad_norm": 0.4034533202648163, |
|
"learning_rate": 0.0015762962962962963, |
|
"loss": 1.9994, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"grad_norm": 0.4201233386993408, |
|
"learning_rate": 0.001574814814814815, |
|
"loss": 2.0357, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 10.67, |
|
"grad_norm": 0.3957751989364624, |
|
"learning_rate": 0.0015733333333333333, |
|
"loss": 2.0283, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"grad_norm": 0.40386030077934265, |
|
"learning_rate": 0.0015718518518518518, |
|
"loss": 2.0228, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 10.74, |
|
"grad_norm": 0.4089337885379791, |
|
"learning_rate": 0.0015703703703703704, |
|
"loss": 2.0192, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"grad_norm": 0.38744863867759705, |
|
"learning_rate": 0.001568888888888889, |
|
"loss": 2.0336, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"grad_norm": 0.41213229298591614, |
|
"learning_rate": 0.0015674074074074073, |
|
"loss": 2.0287, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"grad_norm": 0.4100775718688965, |
|
"learning_rate": 0.001565925925925926, |
|
"loss": 2.0622, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"grad_norm": 0.3857145607471466, |
|
"learning_rate": 0.0015644444444444445, |
|
"loss": 2.0241, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"grad_norm": 0.38610953092575073, |
|
"learning_rate": 0.0015629629629629628, |
|
"loss": 2.0367, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 10.96, |
|
"grad_norm": 0.38510599732398987, |
|
"learning_rate": 0.0015614814814814814, |
|
"loss": 2.0345, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"grad_norm": 0.8309366703033447, |
|
"learning_rate": 0.0015600000000000002, |
|
"loss": 2.0176, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"grad_norm": 0.38769230246543884, |
|
"learning_rate": 0.0015585185185185185, |
|
"loss": 1.8074, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 11.07, |
|
"grad_norm": 0.47605445981025696, |
|
"learning_rate": 0.0015570370370370371, |
|
"loss": 1.8336, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"grad_norm": 0.4387536346912384, |
|
"learning_rate": 0.0015555555555555557, |
|
"loss": 1.7895, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"grad_norm": 0.4390158951282501, |
|
"learning_rate": 0.0015540740740740743, |
|
"loss": 1.8829, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 11.19, |
|
"grad_norm": 0.40687650442123413, |
|
"learning_rate": 0.0015525925925925926, |
|
"loss": 1.8919, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 11.22, |
|
"grad_norm": 0.44035059213638306, |
|
"learning_rate": 0.0015511111111111112, |
|
"loss": 1.8691, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"grad_norm": 0.4709152579307556, |
|
"learning_rate": 0.0015496296296296298, |
|
"loss": 1.8916, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"grad_norm": 0.40621572732925415, |
|
"learning_rate": 0.001548148148148148, |
|
"loss": 1.8913, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"grad_norm": 0.4792878329753876, |
|
"learning_rate": 0.0015466666666666667, |
|
"loss": 1.9039, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 11.37, |
|
"grad_norm": 0.4478456974029541, |
|
"learning_rate": 0.0015451851851851852, |
|
"loss": 1.8723, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"grad_norm": 0.4035521447658539, |
|
"learning_rate": 0.0015437037037037038, |
|
"loss": 1.9437, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 11.44, |
|
"grad_norm": 0.4260112941265106, |
|
"learning_rate": 0.0015422222222222222, |
|
"loss": 1.9769, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"grad_norm": 0.43007922172546387, |
|
"learning_rate": 0.0015407407407407407, |
|
"loss": 1.9229, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 11.52, |
|
"grad_norm": 0.3983825147151947, |
|
"learning_rate": 0.0015392592592592593, |
|
"loss": 1.92, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"grad_norm": 0.42792314291000366, |
|
"learning_rate": 0.0015377777777777777, |
|
"loss": 1.9274, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 11.59, |
|
"grad_norm": 0.4552850127220154, |
|
"learning_rate": 0.0015362962962962962, |
|
"loss": 1.95, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"grad_norm": 0.4422164261341095, |
|
"learning_rate": 0.0015348148148148148, |
|
"loss": 1.9896, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"grad_norm": 0.4225594103336334, |
|
"learning_rate": 0.0015333333333333334, |
|
"loss": 1.9788, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 11.7, |
|
"grad_norm": 0.4389062821865082, |
|
"learning_rate": 0.0015318518518518517, |
|
"loss": 1.9784, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 0.44201141595840454, |
|
"learning_rate": 0.0015303703703703703, |
|
"loss": 1.9676, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 11.78, |
|
"grad_norm": 0.377491295337677, |
|
"learning_rate": 0.001528888888888889, |
|
"loss": 2.0079, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"grad_norm": 0.4113980531692505, |
|
"learning_rate": 0.0015274074074074074, |
|
"loss": 1.9995, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 11.85, |
|
"grad_norm": 0.44535115361213684, |
|
"learning_rate": 0.001525925925925926, |
|
"loss": 1.9633, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"grad_norm": 0.3994745910167694, |
|
"learning_rate": 0.0015244444444444446, |
|
"loss": 2.0275, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"grad_norm": 0.43397390842437744, |
|
"learning_rate": 0.0015229629629629632, |
|
"loss": 1.9791, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"grad_norm": 0.4297787547111511, |
|
"learning_rate": 0.0015214814814814815, |
|
"loss": 1.9933, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"grad_norm": 0.6857362389564514, |
|
"learning_rate": 0.00152, |
|
"loss": 2.0339, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"grad_norm": 0.4128698706626892, |
|
"learning_rate": 0.0015185185185185187, |
|
"loss": 1.7699, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"grad_norm": 0.47724348306655884, |
|
"learning_rate": 0.001517037037037037, |
|
"loss": 1.7503, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"grad_norm": 0.46561479568481445, |
|
"learning_rate": 0.0015155555555555556, |
|
"loss": 1.7583, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 12.15, |
|
"grad_norm": 0.4969346821308136, |
|
"learning_rate": 0.0015140740740740742, |
|
"loss": 1.7912, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"grad_norm": 0.4789353013038635, |
|
"learning_rate": 0.0015125925925925925, |
|
"loss": 1.81, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"grad_norm": 0.43490344285964966, |
|
"learning_rate": 0.001511111111111111, |
|
"loss": 1.8208, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"grad_norm": 0.4532495141029358, |
|
"learning_rate": 0.0015096296296296296, |
|
"loss": 1.8588, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 12.3, |
|
"grad_norm": 0.468681275844574, |
|
"learning_rate": 0.0015081481481481482, |
|
"loss": 1.8655, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 12.33, |
|
"grad_norm": 0.4549664556980133, |
|
"learning_rate": 0.0015066666666666666, |
|
"loss": 1.9103, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"grad_norm": 0.45052316784858704, |
|
"learning_rate": 0.0015051851851851851, |
|
"loss": 1.8733, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"grad_norm": 0.5009397268295288, |
|
"learning_rate": 0.0015037037037037037, |
|
"loss": 1.9003, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"grad_norm": 0.45916783809661865, |
|
"learning_rate": 0.001502222222222222, |
|
"loss": 1.9181, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 12.48, |
|
"grad_norm": 0.4555447995662689, |
|
"learning_rate": 0.0015007407407407406, |
|
"loss": 1.9055, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"grad_norm": 0.42393794655799866, |
|
"learning_rate": 0.0014992592592592594, |
|
"loss": 1.8778, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"grad_norm": 0.41853970289230347, |
|
"learning_rate": 0.001497777777777778, |
|
"loss": 1.9385, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"grad_norm": 0.44111889600753784, |
|
"learning_rate": 0.0014962962962962963, |
|
"loss": 1.9134, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"grad_norm": 0.4621415138244629, |
|
"learning_rate": 0.001494814814814815, |
|
"loss": 1.9066, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"grad_norm": 0.4527168869972229, |
|
"learning_rate": 0.0014933333333333335, |
|
"loss": 1.9641, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"grad_norm": 0.4204464554786682, |
|
"learning_rate": 0.0014918518518518518, |
|
"loss": 1.9363, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"grad_norm": 0.44409334659576416, |
|
"learning_rate": 0.0014903703703703704, |
|
"loss": 1.9602, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 12.78, |
|
"grad_norm": 0.42912226915359497, |
|
"learning_rate": 0.001488888888888889, |
|
"loss": 1.9766, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"grad_norm": 0.440065860748291, |
|
"learning_rate": 0.0014874074074074076, |
|
"loss": 1.9631, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"grad_norm": 0.528200626373291, |
|
"learning_rate": 0.001485925925925926, |
|
"loss": 1.9108, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"grad_norm": 0.41785484552383423, |
|
"learning_rate": 0.0014844444444444445, |
|
"loss": 1.9691, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"grad_norm": 0.4229365885257721, |
|
"learning_rate": 0.001482962962962963, |
|
"loss": 1.9762, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"grad_norm": 0.43063077330589294, |
|
"learning_rate": 0.0014814814814814814, |
|
"loss": 1.9718, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 0.9820219278335571, |
|
"learning_rate": 0.00148, |
|
"loss": 1.9837, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"grad_norm": 0.4445718824863434, |
|
"learning_rate": 0.0014785185185185185, |
|
"loss": 1.6957, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"grad_norm": 0.49743878841400146, |
|
"learning_rate": 0.001477037037037037, |
|
"loss": 1.7397, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 13.11, |
|
"grad_norm": 0.45971688628196716, |
|
"learning_rate": 0.0014755555555555555, |
|
"loss": 1.7366, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"grad_norm": 0.4774453639984131, |
|
"learning_rate": 0.001474074074074074, |
|
"loss": 1.778, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"grad_norm": 0.4667341411113739, |
|
"learning_rate": 0.0014725925925925926, |
|
"loss": 1.7641, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"grad_norm": 0.5121150612831116, |
|
"learning_rate": 0.001471111111111111, |
|
"loss": 1.8191, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"grad_norm": 0.49322351813316345, |
|
"learning_rate": 0.0014696296296296298, |
|
"loss": 1.8065, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"grad_norm": 0.4913088083267212, |
|
"learning_rate": 0.0014681481481481483, |
|
"loss": 1.834, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"grad_norm": 0.45339229702949524, |
|
"learning_rate": 0.0014666666666666667, |
|
"loss": 1.8561, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"grad_norm": 0.4763079285621643, |
|
"learning_rate": 0.0014651851851851853, |
|
"loss": 1.844, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 13.41, |
|
"grad_norm": 0.5057812929153442, |
|
"learning_rate": 0.0014637037037037038, |
|
"loss": 1.8698, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"grad_norm": 0.4484459459781647, |
|
"learning_rate": 0.0014622222222222224, |
|
"loss": 1.854, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"grad_norm": 0.45381781458854675, |
|
"learning_rate": 0.0014607407407407407, |
|
"loss": 1.8449, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"grad_norm": 0.49549397826194763, |
|
"learning_rate": 0.0014592592592592593, |
|
"loss": 1.8873, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"grad_norm": 0.44379979372024536, |
|
"learning_rate": 0.0014577777777777779, |
|
"loss": 1.8636, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"grad_norm": 0.5110834836959839, |
|
"learning_rate": 0.0014562962962962962, |
|
"loss": 1.8945, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"grad_norm": 0.45052438974380493, |
|
"learning_rate": 0.0014548148148148148, |
|
"loss": 1.9166, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"grad_norm": 0.44467103481292725, |
|
"learning_rate": 0.0014533333333333334, |
|
"loss": 1.8971, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"grad_norm": 0.46108537912368774, |
|
"learning_rate": 0.001451851851851852, |
|
"loss": 1.8986, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"grad_norm": 0.4929622411727905, |
|
"learning_rate": 0.0014503703703703703, |
|
"loss": 1.9173, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"grad_norm": 0.45322754979133606, |
|
"learning_rate": 0.0014488888888888889, |
|
"loss": 1.9376, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"grad_norm": 0.43895524740219116, |
|
"learning_rate": 0.0014474074074074075, |
|
"loss": 1.9394, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"grad_norm": 0.43144819140434265, |
|
"learning_rate": 0.0014459259259259258, |
|
"loss": 1.892, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"grad_norm": 0.45700499415397644, |
|
"learning_rate": 0.0014444444444444444, |
|
"loss": 1.9435, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"grad_norm": 0.46102723479270935, |
|
"learning_rate": 0.001442962962962963, |
|
"loss": 1.9424, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"grad_norm": 0.4725353717803955, |
|
"learning_rate": 0.0014414814814814815, |
|
"loss": 1.9018, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"grad_norm": 0.9278309345245361, |
|
"learning_rate": 0.0014399999999999999, |
|
"loss": 1.9556, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 14.04, |
|
"grad_norm": 0.528106153011322, |
|
"learning_rate": 0.0014385185185185187, |
|
"loss": 1.7123, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"grad_norm": 0.5264120101928711, |
|
"learning_rate": 0.0014370370370370372, |
|
"loss": 1.6959, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"grad_norm": 0.5138201117515564, |
|
"learning_rate": 0.0014355555555555556, |
|
"loss": 1.7195, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 14.15, |
|
"grad_norm": 0.5557966232299805, |
|
"learning_rate": 0.0014340740740740742, |
|
"loss": 1.7021, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"grad_norm": 0.4654610753059387, |
|
"learning_rate": 0.0014325925925925927, |
|
"loss": 1.7411, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 14.22, |
|
"grad_norm": 0.49798938632011414, |
|
"learning_rate": 0.001431111111111111, |
|
"loss": 1.742, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"grad_norm": 0.4929085969924927, |
|
"learning_rate": 0.0014296296296296297, |
|
"loss": 1.7655, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 14.3, |
|
"grad_norm": 0.49013039469718933, |
|
"learning_rate": 0.0014281481481481482, |
|
"loss": 1.7726, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"grad_norm": 0.5224959254264832, |
|
"learning_rate": 0.0014266666666666668, |
|
"loss": 1.7967, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 14.37, |
|
"grad_norm": 0.5195993185043335, |
|
"learning_rate": 0.0014251851851851851, |
|
"loss": 1.8479, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"grad_norm": 0.4698856770992279, |
|
"learning_rate": 0.0014237037037037037, |
|
"loss": 1.8365, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"grad_norm": 0.5218276977539062, |
|
"learning_rate": 0.0014222222222222223, |
|
"loss": 1.8239, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"grad_norm": 0.4560031294822693, |
|
"learning_rate": 0.0014207407407407406, |
|
"loss": 1.8485, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"grad_norm": 0.47998443245887756, |
|
"learning_rate": 0.0014192592592592592, |
|
"loss": 1.8399, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"grad_norm": 0.4536432921886444, |
|
"learning_rate": 0.0014177777777777778, |
|
"loss": 1.8454, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"grad_norm": 0.4415784776210785, |
|
"learning_rate": 0.0014162962962962964, |
|
"loss": 1.8784, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"grad_norm": 0.4850631356239319, |
|
"learning_rate": 0.0014148148148148147, |
|
"loss": 1.8635, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 14.67, |
|
"grad_norm": 0.4737834930419922, |
|
"learning_rate": 0.0014133333333333333, |
|
"loss": 1.8486, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 14.7, |
|
"grad_norm": 0.4832935631275177, |
|
"learning_rate": 0.0014118518518518519, |
|
"loss": 1.8626, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"grad_norm": 0.4913930594921112, |
|
"learning_rate": 0.0014103703703703702, |
|
"loss": 1.9059, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 14.78, |
|
"grad_norm": 0.4726288616657257, |
|
"learning_rate": 0.001408888888888889, |
|
"loss": 1.893, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 14.81, |
|
"grad_norm": 0.47808048129081726, |
|
"learning_rate": 0.0014074074074074076, |
|
"loss": 1.864, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 14.85, |
|
"grad_norm": 0.493651419878006, |
|
"learning_rate": 0.0014059259259259261, |
|
"loss": 1.907, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"grad_norm": 0.47489118576049805, |
|
"learning_rate": 0.0014044444444444445, |
|
"loss": 1.901, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 14.93, |
|
"grad_norm": 0.45440223813056946, |
|
"learning_rate": 0.001402962962962963, |
|
"loss": 1.8882, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 14.96, |
|
"grad_norm": 0.4635215103626251, |
|
"learning_rate": 0.0014014814814814816, |
|
"loss": 1.9037, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 0.9060683846473694, |
|
"learning_rate": 0.0014, |
|
"loss": 1.8878, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 15.04, |
|
"grad_norm": 0.4977479577064514, |
|
"learning_rate": 0.0013985185185185186, |
|
"loss": 1.6462, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 15.07, |
|
"grad_norm": 0.5147350430488586, |
|
"learning_rate": 0.0013970370370370371, |
|
"loss": 1.6374, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"grad_norm": 0.5145394206047058, |
|
"learning_rate": 0.0013955555555555557, |
|
"loss": 1.6771, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 15.15, |
|
"grad_norm": 0.5721226930618286, |
|
"learning_rate": 0.001394074074074074, |
|
"loss": 1.686, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 15.19, |
|
"grad_norm": 0.4931713938713074, |
|
"learning_rate": 0.0013925925925925926, |
|
"loss": 1.6849, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 15.22, |
|
"grad_norm": 0.5027201175689697, |
|
"learning_rate": 0.0013911111111111112, |
|
"loss": 1.7257, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"grad_norm": 0.5069899559020996, |
|
"learning_rate": 0.0013896296296296295, |
|
"loss": 1.766, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 15.3, |
|
"grad_norm": 0.538689136505127, |
|
"learning_rate": 0.0013881481481481481, |
|
"loss": 1.7425, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 15.33, |
|
"grad_norm": 0.4966994822025299, |
|
"learning_rate": 0.0013866666666666667, |
|
"loss": 1.7532, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 15.37, |
|
"grad_norm": 0.504673421382904, |
|
"learning_rate": 0.001385185185185185, |
|
"loss": 1.7633, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"grad_norm": 0.547132134437561, |
|
"learning_rate": 0.0013837037037037036, |
|
"loss": 1.8001, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"grad_norm": 0.5524857044219971, |
|
"learning_rate": 0.0013822222222222222, |
|
"loss": 1.7945, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 15.48, |
|
"grad_norm": 0.5102695822715759, |
|
"learning_rate": 0.0013807407407407408, |
|
"loss": 1.8037, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 15.52, |
|
"grad_norm": 0.48458731174468994, |
|
"learning_rate": 0.0013792592592592591, |
|
"loss": 1.8401, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"grad_norm": 0.4723525643348694, |
|
"learning_rate": 0.001377777777777778, |
|
"loss": 1.7892, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"grad_norm": 0.5020443201065063, |
|
"learning_rate": 0.0013762962962962965, |
|
"loss": 1.8237, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 15.63, |
|
"grad_norm": 0.47924599051475525, |
|
"learning_rate": 0.0013748148148148148, |
|
"loss": 1.8486, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 15.67, |
|
"grad_norm": 0.5095358490943909, |
|
"learning_rate": 0.0013733333333333334, |
|
"loss": 1.8571, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 15.7, |
|
"grad_norm": 0.4978972375392914, |
|
"learning_rate": 0.001371851851851852, |
|
"loss": 1.8439, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"grad_norm": 0.5177919268608093, |
|
"learning_rate": 0.0013703703703703705, |
|
"loss": 1.8682, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 15.78, |
|
"grad_norm": 0.5087350010871887, |
|
"learning_rate": 0.0013688888888888889, |
|
"loss": 1.8264, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 15.81, |
|
"grad_norm": 0.5243315100669861, |
|
"learning_rate": 0.0013674074074074075, |
|
"loss": 1.8701, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 15.85, |
|
"grad_norm": 0.5063629746437073, |
|
"learning_rate": 0.001365925925925926, |
|
"loss": 1.8565, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"grad_norm": 0.4717652201652527, |
|
"learning_rate": 0.0013644444444444444, |
|
"loss": 1.8555, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 15.93, |
|
"grad_norm": 0.4810638725757599, |
|
"learning_rate": 0.001362962962962963, |
|
"loss": 1.8492, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 15.96, |
|
"grad_norm": 0.4636698365211487, |
|
"learning_rate": 0.0013614814814814815, |
|
"loss": 1.8597, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 0.9389230608940125, |
|
"learning_rate": 0.00136, |
|
"loss": 1.8731, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"grad_norm": 0.5125114917755127, |
|
"learning_rate": 0.0013585185185185185, |
|
"loss": 1.6402, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"grad_norm": 0.5031929612159729, |
|
"learning_rate": 0.001357037037037037, |
|
"loss": 1.6566, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 16.11, |
|
"grad_norm": 0.4980713427066803, |
|
"learning_rate": 0.0013555555555555556, |
|
"loss": 1.6759, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"grad_norm": 0.49954333901405334, |
|
"learning_rate": 0.001354074074074074, |
|
"loss": 1.6649, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"grad_norm": 0.5035192966461182, |
|
"learning_rate": 0.0013525925925925925, |
|
"loss": 1.6801, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 16.22, |
|
"grad_norm": 0.5428767800331116, |
|
"learning_rate": 0.001351111111111111, |
|
"loss": 1.6574, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 16.26, |
|
"grad_norm": 0.5608072280883789, |
|
"learning_rate": 0.0013496296296296297, |
|
"loss": 1.6877, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"grad_norm": 0.5320686101913452, |
|
"learning_rate": 0.0013481481481481482, |
|
"loss": 1.6884, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 16.33, |
|
"grad_norm": 0.5398997664451599, |
|
"learning_rate": 0.0013466666666666668, |
|
"loss": 1.717, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"grad_norm": 0.4949876368045807, |
|
"learning_rate": 0.0013451851851851854, |
|
"loss": 1.763, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"grad_norm": 0.4945550262928009, |
|
"learning_rate": 0.0013437037037037037, |
|
"loss": 1.7689, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 16.44, |
|
"grad_norm": 0.5274211764335632, |
|
"learning_rate": 0.0013422222222222223, |
|
"loss": 1.7568, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 16.48, |
|
"grad_norm": 0.5125142931938171, |
|
"learning_rate": 0.0013407407407407409, |
|
"loss": 1.782, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"grad_norm": 0.4983424246311188, |
|
"learning_rate": 0.0013392592592592592, |
|
"loss": 1.8036, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"grad_norm": 0.49390140175819397, |
|
"learning_rate": 0.0013377777777777778, |
|
"loss": 1.7774, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 16.59, |
|
"grad_norm": 0.4962531626224518, |
|
"learning_rate": 0.0013362962962962964, |
|
"loss": 1.7785, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 16.63, |
|
"grad_norm": 0.4956791400909424, |
|
"learning_rate": 0.001334814814814815, |
|
"loss": 1.7814, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"grad_norm": 0.5397014617919922, |
|
"learning_rate": 0.0013333333333333333, |
|
"loss": 1.8125, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"grad_norm": 0.4916256070137024, |
|
"learning_rate": 0.0013318518518518519, |
|
"loss": 1.7915, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 16.74, |
|
"grad_norm": 0.5432267189025879, |
|
"learning_rate": 0.0013303703703703704, |
|
"loss": 1.8132, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 16.78, |
|
"grad_norm": 0.480970174074173, |
|
"learning_rate": 0.0013288888888888888, |
|
"loss": 1.7865, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 16.81, |
|
"grad_norm": 0.4810551702976227, |
|
"learning_rate": 0.0013274074074074074, |
|
"loss": 1.8132, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 16.85, |
|
"grad_norm": 0.4746386706829071, |
|
"learning_rate": 0.001325925925925926, |
|
"loss": 1.8493, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 16.89, |
|
"grad_norm": 0.5167309641838074, |
|
"learning_rate": 0.0013244444444444445, |
|
"loss": 1.8126, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"grad_norm": 0.49187368154525757, |
|
"learning_rate": 0.0013229629629629629, |
|
"loss": 1.8484, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 16.96, |
|
"grad_norm": 0.5493544340133667, |
|
"learning_rate": 0.0013214814814814814, |
|
"loss": 1.81, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 1.0746710300445557, |
|
"learning_rate": 0.00132, |
|
"loss": 1.8312, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 17.04, |
|
"grad_norm": 0.5131716132164001, |
|
"learning_rate": 0.0013185185185185183, |
|
"loss": 1.6069, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 17.07, |
|
"grad_norm": 0.5575482845306396, |
|
"learning_rate": 0.0013170370370370371, |
|
"loss": 1.6018, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"grad_norm": 0.5470114350318909, |
|
"learning_rate": 0.0013155555555555557, |
|
"loss": 1.6125, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"grad_norm": 0.5711544752120972, |
|
"learning_rate": 0.0013140740740740743, |
|
"loss": 1.6186, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"grad_norm": 0.550546407699585, |
|
"learning_rate": 0.0013125925925925926, |
|
"loss": 1.6509, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 17.22, |
|
"grad_norm": 0.5357831120491028, |
|
"learning_rate": 0.0013111111111111112, |
|
"loss": 1.6721, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 17.26, |
|
"grad_norm": 0.598581850528717, |
|
"learning_rate": 0.0013096296296296298, |
|
"loss": 1.6795, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"grad_norm": 0.5127149820327759, |
|
"learning_rate": 0.0013081481481481481, |
|
"loss": 1.6937, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"grad_norm": 0.523218035697937, |
|
"learning_rate": 0.0013066666666666667, |
|
"loss": 1.707, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 17.37, |
|
"grad_norm": 0.5636884570121765, |
|
"learning_rate": 0.0013051851851851853, |
|
"loss": 1.7077, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 17.41, |
|
"grad_norm": 0.5461720824241638, |
|
"learning_rate": 0.0013037037037037038, |
|
"loss": 1.7289, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 17.44, |
|
"grad_norm": 0.569513201713562, |
|
"learning_rate": 0.0013022222222222222, |
|
"loss": 1.7377, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 17.48, |
|
"grad_norm": 0.526752769947052, |
|
"learning_rate": 0.0013007407407407408, |
|
"loss": 1.7277, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 17.52, |
|
"grad_norm": 0.5269809365272522, |
|
"learning_rate": 0.0012992592592592593, |
|
"loss": 1.7526, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 17.56, |
|
"grad_norm": 0.5578441023826599, |
|
"learning_rate": 0.0012977777777777777, |
|
"loss": 1.7372, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 17.59, |
|
"grad_norm": 0.5600742101669312, |
|
"learning_rate": 0.0012962962962962963, |
|
"loss": 1.7519, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"grad_norm": 0.5667843818664551, |
|
"learning_rate": 0.0012948148148148148, |
|
"loss": 1.7624, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 17.67, |
|
"grad_norm": 0.5641533732414246, |
|
"learning_rate": 0.0012933333333333332, |
|
"loss": 1.7674, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 17.7, |
|
"grad_norm": 0.49737048149108887, |
|
"learning_rate": 0.0012918518518518518, |
|
"loss": 1.8026, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 17.74, |
|
"grad_norm": 0.5009216666221619, |
|
"learning_rate": 0.0012903703703703703, |
|
"loss": 1.7699, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"grad_norm": 0.5463806986808777, |
|
"learning_rate": 0.001288888888888889, |
|
"loss": 1.7999, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"grad_norm": 0.48484352231025696, |
|
"learning_rate": 0.0012874074074074075, |
|
"loss": 1.7801, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 17.85, |
|
"grad_norm": 0.5290578007698059, |
|
"learning_rate": 0.001285925925925926, |
|
"loss": 1.771, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 17.89, |
|
"grad_norm": 0.5166163444519043, |
|
"learning_rate": 0.0012844444444444446, |
|
"loss": 1.7913, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"grad_norm": 0.502242922782898, |
|
"learning_rate": 0.001282962962962963, |
|
"loss": 1.7949, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"grad_norm": 0.5369807481765747, |
|
"learning_rate": 0.0012814814814814815, |
|
"loss": 1.7944, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"grad_norm": 1.1158689260482788, |
|
"learning_rate": 0.00128, |
|
"loss": 1.7905, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 18.04, |
|
"grad_norm": 0.5336357951164246, |
|
"learning_rate": 0.0012785185185185187, |
|
"loss": 1.5305, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 18.07, |
|
"grad_norm": 0.5469295382499695, |
|
"learning_rate": 0.001277037037037037, |
|
"loss": 1.5331, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"grad_norm": 0.5697037577629089, |
|
"learning_rate": 0.0012755555555555556, |
|
"loss": 1.5679, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 18.15, |
|
"grad_norm": 0.5730404853820801, |
|
"learning_rate": 0.0012740740740740742, |
|
"loss": 1.6015, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 18.19, |
|
"grad_norm": 0.6694100499153137, |
|
"learning_rate": 0.0012725925925925925, |
|
"loss": 1.6017, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 18.22, |
|
"grad_norm": 0.5456265807151794, |
|
"learning_rate": 0.001271111111111111, |
|
"loss": 1.6246, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"grad_norm": 0.6163968443870544, |
|
"learning_rate": 0.0012696296296296297, |
|
"loss": 1.6349, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 18.3, |
|
"grad_norm": 0.5508036613464355, |
|
"learning_rate": 0.0012681481481481482, |
|
"loss": 1.6618, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"grad_norm": 0.5920714735984802, |
|
"learning_rate": 0.0012666666666666666, |
|
"loss": 1.6871, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 18.37, |
|
"grad_norm": 0.5981717109680176, |
|
"learning_rate": 0.0012651851851851852, |
|
"loss": 1.656, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"grad_norm": 0.5558621287345886, |
|
"learning_rate": 0.0012637037037037037, |
|
"loss": 1.687, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"grad_norm": 0.5336385369300842, |
|
"learning_rate": 0.001262222222222222, |
|
"loss": 1.6954, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 18.48, |
|
"grad_norm": 0.56126469373703, |
|
"learning_rate": 0.0012607407407407407, |
|
"loss": 1.7021, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 18.52, |
|
"grad_norm": 0.5533148646354675, |
|
"learning_rate": 0.0012592592592592592, |
|
"loss": 1.6883, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"grad_norm": 0.5625854730606079, |
|
"learning_rate": 0.001257777777777778, |
|
"loss": 1.7399, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"grad_norm": 0.5190121531486511, |
|
"learning_rate": 0.0012562962962962964, |
|
"loss": 1.728, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 18.63, |
|
"grad_norm": 0.5855059027671814, |
|
"learning_rate": 0.001254814814814815, |
|
"loss": 1.7204, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 18.67, |
|
"grad_norm": 0.575351893901825, |
|
"learning_rate": 0.0012533333333333335, |
|
"loss": 1.7359, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"grad_norm": 0.49753502011299133, |
|
"learning_rate": 0.0012518518518518519, |
|
"loss": 1.7195, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"grad_norm": 0.5619398355484009, |
|
"learning_rate": 0.0012503703703703704, |
|
"loss": 1.7515, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"grad_norm": 0.5172871351242065, |
|
"learning_rate": 0.001248888888888889, |
|
"loss": 1.7777, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 18.81, |
|
"grad_norm": 0.5624790191650391, |
|
"learning_rate": 0.0012474074074074074, |
|
"loss": 1.782, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"grad_norm": 0.5403957366943359, |
|
"learning_rate": 0.001245925925925926, |
|
"loss": 1.7426, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"grad_norm": 0.5816079378128052, |
|
"learning_rate": 0.0012444444444444445, |
|
"loss": 1.8058, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 18.93, |
|
"grad_norm": 0.5828419327735901, |
|
"learning_rate": 0.001242962962962963, |
|
"loss": 1.8004, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 18.96, |
|
"grad_norm": 0.5489016175270081, |
|
"learning_rate": 0.0012414814814814814, |
|
"loss": 1.7684, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"grad_norm": 1.1415126323699951, |
|
"learning_rate": 0.00124, |
|
"loss": 1.7936, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"grad_norm": 0.5856507420539856, |
|
"learning_rate": 0.0012385185185185186, |
|
"loss": 1.5181, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"grad_norm": 0.5820109248161316, |
|
"learning_rate": 0.001237037037037037, |
|
"loss": 1.5436, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"grad_norm": 0.591582715511322, |
|
"learning_rate": 0.0012355555555555555, |
|
"loss": 1.5365, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 19.15, |
|
"grad_norm": 0.5697777271270752, |
|
"learning_rate": 0.001234074074074074, |
|
"loss": 1.5799, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"grad_norm": 0.5221427083015442, |
|
"learning_rate": 0.0012325925925925926, |
|
"loss": 1.606, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"grad_norm": 0.5956182479858398, |
|
"learning_rate": 0.001231111111111111, |
|
"loss": 1.6102, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 19.26, |
|
"grad_norm": 0.5815330743789673, |
|
"learning_rate": 0.0012296296296296296, |
|
"loss": 1.6191, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 19.3, |
|
"grad_norm": 0.5627025365829468, |
|
"learning_rate": 0.0012281481481481481, |
|
"loss": 1.629, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 19.33, |
|
"grad_norm": 0.5641065835952759, |
|
"learning_rate": 0.0012266666666666667, |
|
"loss": 1.6447, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"grad_norm": 0.5593487024307251, |
|
"learning_rate": 0.0012251851851851853, |
|
"loss": 1.6571, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 19.41, |
|
"grad_norm": 0.570100724697113, |
|
"learning_rate": 0.0012237037037037038, |
|
"loss": 1.6696, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 19.44, |
|
"grad_norm": 0.5685579776763916, |
|
"learning_rate": 0.0012222222222222224, |
|
"loss": 1.664, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"grad_norm": 0.5899671912193298, |
|
"learning_rate": 0.0012207407407407408, |
|
"loss": 1.6643, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 19.52, |
|
"grad_norm": 0.5786183476448059, |
|
"learning_rate": 0.0012192592592592593, |
|
"loss": 1.6632, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 19.56, |
|
"grad_norm": 0.5458264946937561, |
|
"learning_rate": 0.001217777777777778, |
|
"loss": 1.6924, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 19.59, |
|
"grad_norm": 0.5607069730758667, |
|
"learning_rate": 0.0012162962962962963, |
|
"loss": 1.6996, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 19.63, |
|
"grad_norm": 0.5821597576141357, |
|
"learning_rate": 0.0012148148148148148, |
|
"loss": 1.7002, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"grad_norm": 0.5918251872062683, |
|
"learning_rate": 0.0012133333333333334, |
|
"loss": 1.6962, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"grad_norm": 0.5013391971588135, |
|
"learning_rate": 0.001211851851851852, |
|
"loss": 1.7107, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"grad_norm": 0.5495138168334961, |
|
"learning_rate": 0.0012103703703703703, |
|
"loss": 1.7032, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 19.78, |
|
"grad_norm": 0.565083384513855, |
|
"learning_rate": 0.001208888888888889, |
|
"loss": 1.7078, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 19.81, |
|
"grad_norm": 0.6654185652732849, |
|
"learning_rate": 0.0012074074074074075, |
|
"loss": 1.7146, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"grad_norm": 0.5463594198226929, |
|
"learning_rate": 0.0012059259259259258, |
|
"loss": 1.7316, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 19.89, |
|
"grad_norm": 0.5753750205039978, |
|
"learning_rate": 0.0012044444444444444, |
|
"loss": 1.7339, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 19.93, |
|
"grad_norm": 0.5591763854026794, |
|
"learning_rate": 0.001202962962962963, |
|
"loss": 1.7212, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 19.96, |
|
"grad_norm": 0.5575348734855652, |
|
"learning_rate": 0.0012014814814814813, |
|
"loss": 1.7412, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 1.3188482522964478, |
|
"learning_rate": 0.0012, |
|
"loss": 1.7287, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 20.04, |
|
"grad_norm": 0.5789737701416016, |
|
"learning_rate": 0.0011985185185185185, |
|
"loss": 1.4605, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 20.07, |
|
"grad_norm": 0.6385830044746399, |
|
"learning_rate": 0.0011970370370370373, |
|
"loss": 1.4666, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 20.11, |
|
"grad_norm": 0.6019684076309204, |
|
"learning_rate": 0.0011955555555555556, |
|
"loss": 1.5268, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 20.15, |
|
"grad_norm": 0.5914923548698425, |
|
"learning_rate": 0.0011940740740740742, |
|
"loss": 1.5409, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 20.19, |
|
"grad_norm": 0.6487402319908142, |
|
"learning_rate": 0.0011925925925925927, |
|
"loss": 1.5568, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 20.22, |
|
"grad_norm": 0.6002733707427979, |
|
"learning_rate": 0.001191111111111111, |
|
"loss": 1.5492, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 20.26, |
|
"grad_norm": 0.6476941108703613, |
|
"learning_rate": 0.0011896296296296297, |
|
"loss": 1.5677, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 20.3, |
|
"grad_norm": 0.5633615851402283, |
|
"learning_rate": 0.0011881481481481482, |
|
"loss": 1.6167, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"grad_norm": 0.5733054876327515, |
|
"learning_rate": 0.0011866666666666668, |
|
"loss": 1.6335, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 20.37, |
|
"grad_norm": 0.5960119366645813, |
|
"learning_rate": 0.0011851851851851852, |
|
"loss": 1.5886, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 20.41, |
|
"grad_norm": 0.5275907516479492, |
|
"learning_rate": 0.0011837037037037037, |
|
"loss": 1.624, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 20.44, |
|
"grad_norm": 0.6697429418563843, |
|
"learning_rate": 0.0011822222222222223, |
|
"loss": 1.6462, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 20.48, |
|
"grad_norm": 0.5955538749694824, |
|
"learning_rate": 0.0011807407407407407, |
|
"loss": 1.6311, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 20.52, |
|
"grad_norm": 0.6113746166229248, |
|
"learning_rate": 0.0011792592592592592, |
|
"loss": 1.6212, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 20.56, |
|
"grad_norm": 0.5709012150764465, |
|
"learning_rate": 0.0011777777777777778, |
|
"loss": 1.6609, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 20.59, |
|
"grad_norm": 0.6236965656280518, |
|
"learning_rate": 0.0011762962962962964, |
|
"loss": 1.6794, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"grad_norm": 0.5546655654907227, |
|
"learning_rate": 0.0011748148148148147, |
|
"loss": 1.6809, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 20.67, |
|
"grad_norm": 0.5919294357299805, |
|
"learning_rate": 0.0011733333333333333, |
|
"loss": 1.6551, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 20.7, |
|
"grad_norm": 0.5872615575790405, |
|
"learning_rate": 0.0011718518518518519, |
|
"loss": 1.6593, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 20.74, |
|
"grad_norm": 0.557774543762207, |
|
"learning_rate": 0.0011703703703703702, |
|
"loss": 1.6974, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"grad_norm": 0.559350311756134, |
|
"learning_rate": 0.0011688888888888888, |
|
"loss": 1.6906, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 20.81, |
|
"grad_norm": 0.59498530626297, |
|
"learning_rate": 0.0011674074074074074, |
|
"loss": 1.7249, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 20.85, |
|
"grad_norm": 0.6189757585525513, |
|
"learning_rate": 0.0011659259259259262, |
|
"loss": 1.6822, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 20.89, |
|
"grad_norm": 0.5750965476036072, |
|
"learning_rate": 0.0011644444444444445, |
|
"loss": 1.6976, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 20.93, |
|
"grad_norm": 0.6109630465507507, |
|
"learning_rate": 0.001162962962962963, |
|
"loss": 1.704, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"grad_norm": 0.5818935036659241, |
|
"learning_rate": 0.0011614814814814817, |
|
"loss": 1.701, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"grad_norm": 1.055619716644287, |
|
"learning_rate": 0.00116, |
|
"loss": 1.738, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 21.04, |
|
"grad_norm": 0.6038652062416077, |
|
"learning_rate": 0.0011585185185185186, |
|
"loss": 1.4224, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 21.07, |
|
"grad_norm": 0.7057866454124451, |
|
"learning_rate": 0.0011570370370370371, |
|
"loss": 1.4838, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 21.11, |
|
"grad_norm": 0.6207330226898193, |
|
"learning_rate": 0.0011555555555555555, |
|
"loss": 1.5158, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 21.15, |
|
"grad_norm": 0.6148425340652466, |
|
"learning_rate": 0.001154074074074074, |
|
"loss": 1.4815, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 21.19, |
|
"grad_norm": 0.6946470737457275, |
|
"learning_rate": 0.0011525925925925926, |
|
"loss": 1.5331, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 21.22, |
|
"grad_norm": 0.6209535002708435, |
|
"learning_rate": 0.0011511111111111112, |
|
"loss": 1.5187, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"grad_norm": 0.6300204396247864, |
|
"learning_rate": 0.0011496296296296296, |
|
"loss": 1.5572, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 21.3, |
|
"grad_norm": 0.594424843788147, |
|
"learning_rate": 0.0011481481481481481, |
|
"loss": 1.5599, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 21.33, |
|
"grad_norm": 0.6507193446159363, |
|
"learning_rate": 0.0011466666666666667, |
|
"loss": 1.5839, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 21.37, |
|
"grad_norm": 0.5893404483795166, |
|
"learning_rate": 0.001145185185185185, |
|
"loss": 1.5907, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 21.41, |
|
"grad_norm": 0.6263285875320435, |
|
"learning_rate": 0.0011437037037037036, |
|
"loss": 1.6002, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 21.44, |
|
"grad_norm": 0.6052868366241455, |
|
"learning_rate": 0.0011422222222222222, |
|
"loss": 1.5703, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 21.48, |
|
"grad_norm": 0.6054526567459106, |
|
"learning_rate": 0.0011407407407407408, |
|
"loss": 1.6044, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 21.52, |
|
"grad_norm": 0.6268815994262695, |
|
"learning_rate": 0.0011392592592592591, |
|
"loss": 1.5775, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 21.56, |
|
"grad_norm": 0.6130894422531128, |
|
"learning_rate": 0.0011377777777777777, |
|
"loss": 1.6355, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 21.59, |
|
"grad_norm": 0.5609670281410217, |
|
"learning_rate": 0.0011362962962962965, |
|
"loss": 1.6457, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 21.63, |
|
"grad_norm": 0.5807933807373047, |
|
"learning_rate": 0.0011348148148148148, |
|
"loss": 1.6233, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 21.67, |
|
"grad_norm": 0.6003103256225586, |
|
"learning_rate": 0.0011333333333333334, |
|
"loss": 1.6452, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 21.7, |
|
"grad_norm": 0.5617521405220032, |
|
"learning_rate": 0.001131851851851852, |
|
"loss": 1.6673, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 21.74, |
|
"grad_norm": 0.6276643872261047, |
|
"learning_rate": 0.0011303703703703706, |
|
"loss": 1.6296, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 21.78, |
|
"grad_norm": 0.6255550980567932, |
|
"learning_rate": 0.001128888888888889, |
|
"loss": 1.666, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 21.81, |
|
"grad_norm": 0.625771701335907, |
|
"learning_rate": 0.0011274074074074075, |
|
"loss": 1.677, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 21.85, |
|
"grad_norm": 0.5867087841033936, |
|
"learning_rate": 0.001125925925925926, |
|
"loss": 1.6738, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"grad_norm": 0.6343807578086853, |
|
"learning_rate": 0.0011244444444444444, |
|
"loss": 1.6722, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 21.93, |
|
"grad_norm": 0.5979423522949219, |
|
"learning_rate": 0.001122962962962963, |
|
"loss": 1.6602, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 21.96, |
|
"grad_norm": 0.5572076439857483, |
|
"learning_rate": 0.0011214814814814815, |
|
"loss": 1.7001, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"grad_norm": 1.0608664751052856, |
|
"learning_rate": 0.0011200000000000001, |
|
"loss": 1.703, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 22.04, |
|
"grad_norm": 0.6108244061470032, |
|
"learning_rate": 0.0011185185185185185, |
|
"loss": 1.4507, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 22.07, |
|
"grad_norm": 0.6707803010940552, |
|
"learning_rate": 0.001117037037037037, |
|
"loss": 1.4117, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 22.11, |
|
"grad_norm": 0.6366927623748779, |
|
"learning_rate": 0.0011155555555555556, |
|
"loss": 1.4474, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 22.15, |
|
"grad_norm": 0.5935031175613403, |
|
"learning_rate": 0.001114074074074074, |
|
"loss": 1.4372, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 22.19, |
|
"grad_norm": 0.6634930372238159, |
|
"learning_rate": 0.0011125925925925925, |
|
"loss": 1.4715, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 22.22, |
|
"grad_norm": 0.6681028008460999, |
|
"learning_rate": 0.0011111111111111111, |
|
"loss": 1.5219, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 22.26, |
|
"grad_norm": 0.6176914572715759, |
|
"learning_rate": 0.0011096296296296295, |
|
"loss": 1.4901, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 22.3, |
|
"grad_norm": 0.6895126104354858, |
|
"learning_rate": 0.001108148148148148, |
|
"loss": 1.5168, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 22.33, |
|
"grad_norm": 0.633429765701294, |
|
"learning_rate": 0.0011066666666666666, |
|
"loss": 1.5659, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 22.37, |
|
"grad_norm": 0.6452445387840271, |
|
"learning_rate": 0.0011051851851851854, |
|
"loss": 1.556, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 22.41, |
|
"grad_norm": 0.5586777329444885, |
|
"learning_rate": 0.0011037037037037037, |
|
"loss": 1.5652, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 22.44, |
|
"grad_norm": 0.6635491251945496, |
|
"learning_rate": 0.0011022222222222223, |
|
"loss": 1.6015, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 22.48, |
|
"grad_norm": 0.6505566835403442, |
|
"learning_rate": 0.0011007407407407409, |
|
"loss": 1.5544, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"grad_norm": 0.635430097579956, |
|
"learning_rate": 0.0010992592592592592, |
|
"loss": 1.5827, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 22.56, |
|
"grad_norm": 0.602946937084198, |
|
"learning_rate": 0.0010977777777777778, |
|
"loss": 1.5972, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 22.59, |
|
"grad_norm": 0.6203567981719971, |
|
"learning_rate": 0.0010962962962962964, |
|
"loss": 1.6029, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 22.63, |
|
"grad_norm": 0.6036391258239746, |
|
"learning_rate": 0.001094814814814815, |
|
"loss": 1.6075, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"grad_norm": 0.6256942749023438, |
|
"learning_rate": 0.0010933333333333333, |
|
"loss": 1.6119, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"grad_norm": 0.6039266586303711, |
|
"learning_rate": 0.0010918518518518519, |
|
"loss": 1.5997, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 22.74, |
|
"grad_norm": 0.6471481323242188, |
|
"learning_rate": 0.0010903703703703705, |
|
"loss": 1.6425, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 22.78, |
|
"grad_norm": 0.6322785019874573, |
|
"learning_rate": 0.0010888888888888888, |
|
"loss": 1.595, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 22.81, |
|
"grad_norm": 0.6034554839134216, |
|
"learning_rate": 0.0010874074074074074, |
|
"loss": 1.6375, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 22.85, |
|
"grad_norm": 0.636727511882782, |
|
"learning_rate": 0.001085925925925926, |
|
"loss": 1.6694, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 22.89, |
|
"grad_norm": 0.6205847859382629, |
|
"learning_rate": 0.0010844444444444445, |
|
"loss": 1.6411, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 22.93, |
|
"grad_norm": 0.6346337795257568, |
|
"learning_rate": 0.0010829629629629629, |
|
"loss": 1.6344, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 22.96, |
|
"grad_norm": 0.6167390942573547, |
|
"learning_rate": 0.0010814814814814814, |
|
"loss": 1.6659, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"grad_norm": 1.001674771308899, |
|
"learning_rate": 0.00108, |
|
"loss": 1.6554, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 23.04, |
|
"grad_norm": 0.6658300757408142, |
|
"learning_rate": 0.0010785185185185184, |
|
"loss": 1.4168, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 23.07, |
|
"grad_norm": 0.6166800856590271, |
|
"learning_rate": 0.001077037037037037, |
|
"loss": 1.4117, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 23.11, |
|
"grad_norm": 0.6343493461608887, |
|
"learning_rate": 0.0010755555555555557, |
|
"loss": 1.4265, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"grad_norm": 0.6020140647888184, |
|
"learning_rate": 0.0010740740740740743, |
|
"loss": 1.4226, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 23.19, |
|
"grad_norm": 0.6626644134521484, |
|
"learning_rate": 0.0010725925925925927, |
|
"loss": 1.4645, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 23.22, |
|
"grad_norm": 0.7044302821159363, |
|
"learning_rate": 0.0010711111111111112, |
|
"loss": 1.4651, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 23.26, |
|
"grad_norm": 0.7018380165100098, |
|
"learning_rate": 0.0010696296296296298, |
|
"loss": 1.4781, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 23.3, |
|
"grad_norm": 0.6937040686607361, |
|
"learning_rate": 0.0010681481481481481, |
|
"loss": 1.5295, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 23.33, |
|
"grad_norm": 0.6271573901176453, |
|
"learning_rate": 0.0010666666666666667, |
|
"loss": 1.5178, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 23.37, |
|
"grad_norm": 0.6487247347831726, |
|
"learning_rate": 0.0010651851851851853, |
|
"loss": 1.5118, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 23.41, |
|
"grad_norm": 0.6135371327400208, |
|
"learning_rate": 0.0010637037037037036, |
|
"loss": 1.5239, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 23.44, |
|
"grad_norm": 0.6121995449066162, |
|
"learning_rate": 0.0010622222222222222, |
|
"loss": 1.538, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 23.48, |
|
"grad_norm": 0.619559645652771, |
|
"learning_rate": 0.0010607407407407408, |
|
"loss": 1.5498, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 23.52, |
|
"grad_norm": 0.6271837949752808, |
|
"learning_rate": 0.0010592592592592594, |
|
"loss": 1.5702, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 23.56, |
|
"grad_norm": 0.6492928266525269, |
|
"learning_rate": 0.0010577777777777777, |
|
"loss": 1.5458, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 23.59, |
|
"grad_norm": 0.6269136071205139, |
|
"learning_rate": 0.0010562962962962963, |
|
"loss": 1.5948, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 23.63, |
|
"grad_norm": 0.6717014908790588, |
|
"learning_rate": 0.0010548148148148149, |
|
"loss": 1.5589, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 23.67, |
|
"grad_norm": 0.6429439783096313, |
|
"learning_rate": 0.0010533333333333332, |
|
"loss": 1.5839, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 23.7, |
|
"grad_norm": 0.6275211572647095, |
|
"learning_rate": 0.0010518518518518518, |
|
"loss": 1.5769, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 23.74, |
|
"grad_norm": 0.6340334415435791, |
|
"learning_rate": 0.0010503703703703703, |
|
"loss": 1.5764, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 23.78, |
|
"grad_norm": 0.6724662780761719, |
|
"learning_rate": 0.001048888888888889, |
|
"loss": 1.6057, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 23.81, |
|
"grad_norm": 0.6650535464286804, |
|
"learning_rate": 0.0010474074074074073, |
|
"loss": 1.6207, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 23.85, |
|
"grad_norm": 0.6134965419769287, |
|
"learning_rate": 0.0010459259259259258, |
|
"loss": 1.5891, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 23.89, |
|
"grad_norm": 0.6735835671424866, |
|
"learning_rate": 0.0010444444444444446, |
|
"loss": 1.5959, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 23.93, |
|
"grad_norm": 0.6774803996086121, |
|
"learning_rate": 0.001042962962962963, |
|
"loss": 1.6223, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 23.96, |
|
"grad_norm": 0.6173749566078186, |
|
"learning_rate": 0.0010414814814814816, |
|
"loss": 1.6134, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"grad_norm": 1.0137830972671509, |
|
"learning_rate": 0.0010400000000000001, |
|
"loss": 1.6228, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 24.04, |
|
"grad_norm": 0.6443659067153931, |
|
"learning_rate": 0.0010385185185185187, |
|
"loss": 1.3545, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 24.07, |
|
"grad_norm": 0.6179562211036682, |
|
"learning_rate": 0.001037037037037037, |
|
"loss": 1.4058, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"grad_norm": 0.6297662258148193, |
|
"learning_rate": 0.0010355555555555556, |
|
"loss": 1.386, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 24.15, |
|
"grad_norm": 0.6821028590202332, |
|
"learning_rate": 0.0010340740740740742, |
|
"loss": 1.3864, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 24.19, |
|
"grad_norm": 0.7033368349075317, |
|
"learning_rate": 0.0010325925925925925, |
|
"loss": 1.3966, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 24.22, |
|
"grad_norm": 0.6669091582298279, |
|
"learning_rate": 0.0010311111111111111, |
|
"loss": 1.4178, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 24.26, |
|
"grad_norm": 0.6988611817359924, |
|
"learning_rate": 0.0010296296296296297, |
|
"loss": 1.4648, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 24.3, |
|
"grad_norm": 0.6371246576309204, |
|
"learning_rate": 0.0010281481481481483, |
|
"loss": 1.4565, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 24.33, |
|
"grad_norm": 0.6623414754867554, |
|
"learning_rate": 0.0010266666666666666, |
|
"loss": 1.4721, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 24.37, |
|
"grad_norm": 0.6429168581962585, |
|
"learning_rate": 0.0010251851851851852, |
|
"loss": 1.444, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 24.41, |
|
"grad_norm": 0.6748073101043701, |
|
"learning_rate": 0.0010237037037037038, |
|
"loss": 1.5222, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 24.44, |
|
"grad_norm": 0.6635433435440063, |
|
"learning_rate": 0.0010222222222222221, |
|
"loss": 1.5101, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 24.48, |
|
"grad_norm": 0.7208848595619202, |
|
"learning_rate": 0.0010207407407407407, |
|
"loss": 1.5472, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 24.52, |
|
"grad_norm": 0.6758795380592346, |
|
"learning_rate": 0.0010192592592592593, |
|
"loss": 1.504, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 24.56, |
|
"grad_norm": 0.7414839863777161, |
|
"learning_rate": 0.0010177777777777776, |
|
"loss": 1.5277, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 24.59, |
|
"grad_norm": 0.6956931948661804, |
|
"learning_rate": 0.0010162962962962962, |
|
"loss": 1.5416, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 24.63, |
|
"grad_norm": 0.669710099697113, |
|
"learning_rate": 0.001014814814814815, |
|
"loss": 1.5459, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 24.67, |
|
"grad_norm": 0.6642506122589111, |
|
"learning_rate": 0.0010133333333333335, |
|
"loss": 1.5502, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 24.7, |
|
"grad_norm": 0.7576358914375305, |
|
"learning_rate": 0.0010118518518518519, |
|
"loss": 1.5497, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 24.74, |
|
"grad_norm": 0.6992085576057434, |
|
"learning_rate": 0.0010103703703703705, |
|
"loss": 1.5709, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 24.78, |
|
"grad_norm": 0.694860577583313, |
|
"learning_rate": 0.001008888888888889, |
|
"loss": 1.5626, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 24.81, |
|
"grad_norm": 0.6058640480041504, |
|
"learning_rate": 0.0010074074074074074, |
|
"loss": 1.5897, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 24.85, |
|
"grad_norm": 0.7034140229225159, |
|
"learning_rate": 0.001005925925925926, |
|
"loss": 1.5878, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 24.89, |
|
"grad_norm": 0.6275174617767334, |
|
"learning_rate": 0.0010044444444444445, |
|
"loss": 1.5975, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 24.93, |
|
"grad_norm": 0.6417464017868042, |
|
"learning_rate": 0.001002962962962963, |
|
"loss": 1.5888, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 24.96, |
|
"grad_norm": 0.5986396074295044, |
|
"learning_rate": 0.0010014814814814814, |
|
"loss": 1.6057, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"grad_norm": 1.4795289039611816, |
|
"learning_rate": 0.001, |
|
"loss": 1.5858, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 25.04, |
|
"grad_norm": 0.6829366087913513, |
|
"learning_rate": 0.0009985185185185186, |
|
"loss": 1.3255, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 25.07, |
|
"grad_norm": 0.6618345975875854, |
|
"learning_rate": 0.0009970370370370372, |
|
"loss": 1.3511, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 25.11, |
|
"grad_norm": 0.6847849488258362, |
|
"learning_rate": 0.0009955555555555555, |
|
"loss": 1.3619, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 25.15, |
|
"grad_norm": 0.7433702945709229, |
|
"learning_rate": 0.000994074074074074, |
|
"loss": 1.3861, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 25.19, |
|
"grad_norm": 0.7247262597084045, |
|
"learning_rate": 0.0009925925925925927, |
|
"loss": 1.4214, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 25.22, |
|
"grad_norm": 0.6940606236457825, |
|
"learning_rate": 0.0009911111111111112, |
|
"loss": 1.4373, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 25.26, |
|
"grad_norm": 0.6243559122085571, |
|
"learning_rate": 0.0009896296296296296, |
|
"loss": 1.419, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 25.3, |
|
"grad_norm": 0.7156795859336853, |
|
"learning_rate": 0.0009881481481481482, |
|
"loss": 1.4338, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 25.33, |
|
"grad_norm": 0.6805399060249329, |
|
"learning_rate": 0.0009866666666666667, |
|
"loss": 1.4278, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 25.37, |
|
"grad_norm": 0.7315900325775146, |
|
"learning_rate": 0.000985185185185185, |
|
"loss": 1.4578, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 25.41, |
|
"grad_norm": 0.6801655888557434, |
|
"learning_rate": 0.0009837037037037039, |
|
"loss": 1.4719, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 25.44, |
|
"grad_norm": 0.664529025554657, |
|
"learning_rate": 0.0009822222222222222, |
|
"loss": 1.4752, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 25.48, |
|
"grad_norm": 0.6608176231384277, |
|
"learning_rate": 0.0009807407407407408, |
|
"loss": 1.4745, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 25.52, |
|
"grad_norm": 0.7137537002563477, |
|
"learning_rate": 0.0009792592592592594, |
|
"loss": 1.4934, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 25.56, |
|
"grad_norm": 0.698538064956665, |
|
"learning_rate": 0.0009777777777777777, |
|
"loss": 1.4909, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 25.59, |
|
"grad_norm": 0.6954187750816345, |
|
"learning_rate": 0.0009762962962962964, |
|
"loss": 1.4949, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 25.63, |
|
"grad_norm": 0.7112638354301453, |
|
"learning_rate": 0.0009748148148148149, |
|
"loss": 1.5047, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 25.67, |
|
"grad_norm": 0.6482926607131958, |
|
"learning_rate": 0.0009733333333333334, |
|
"loss": 1.541, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 25.7, |
|
"grad_norm": 0.6862718462944031, |
|
"learning_rate": 0.0009718518518518519, |
|
"loss": 1.5217, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 25.74, |
|
"grad_norm": 0.7731064558029175, |
|
"learning_rate": 0.0009703703703703704, |
|
"loss": 1.5248, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 25.78, |
|
"grad_norm": 0.6840183138847351, |
|
"learning_rate": 0.0009688888888888889, |
|
"loss": 1.5454, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 25.81, |
|
"grad_norm": 0.6388102769851685, |
|
"learning_rate": 0.0009674074074074074, |
|
"loss": 1.5506, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 25.85, |
|
"grad_norm": 0.6708328127861023, |
|
"learning_rate": 0.000965925925925926, |
|
"loss": 1.5413, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 25.89, |
|
"grad_norm": 0.7057149410247803, |
|
"learning_rate": 0.0009644444444444444, |
|
"loss": 1.5626, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 25.93, |
|
"grad_norm": 0.6393432021141052, |
|
"learning_rate": 0.0009629629629629629, |
|
"loss": 1.5463, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 25.96, |
|
"grad_norm": 0.6737004518508911, |
|
"learning_rate": 0.0009614814814814816, |
|
"loss": 1.5552, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"grad_norm": 1.0224347114562988, |
|
"learning_rate": 0.00096, |
|
"loss": 1.5871, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 26.04, |
|
"grad_norm": 0.6330727934837341, |
|
"learning_rate": 0.0009585185185185186, |
|
"loss": 1.3241, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 26.07, |
|
"grad_norm": 0.6640604138374329, |
|
"learning_rate": 0.0009570370370370371, |
|
"loss": 1.3282, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 26.11, |
|
"grad_norm": 0.6334345936775208, |
|
"learning_rate": 0.0009555555555555556, |
|
"loss": 1.3204, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 26.15, |
|
"grad_norm": 0.6973744034767151, |
|
"learning_rate": 0.0009540740740740741, |
|
"loss": 1.3403, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 26.19, |
|
"grad_norm": 0.7151355147361755, |
|
"learning_rate": 0.0009525925925925926, |
|
"loss": 1.3856, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 26.22, |
|
"grad_norm": 0.7948924899101257, |
|
"learning_rate": 0.0009511111111111111, |
|
"loss": 1.3786, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 26.26, |
|
"grad_norm": 0.6959167122840881, |
|
"learning_rate": 0.0009496296296296296, |
|
"loss": 1.4085, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 26.3, |
|
"grad_norm": 0.6958004236221313, |
|
"learning_rate": 0.0009481481481481482, |
|
"loss": 1.4115, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 26.33, |
|
"grad_norm": 0.6725401282310486, |
|
"learning_rate": 0.0009466666666666667, |
|
"loss": 1.4026, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 26.37, |
|
"grad_norm": 0.7028361558914185, |
|
"learning_rate": 0.0009451851851851853, |
|
"loss": 1.4275, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 26.41, |
|
"grad_norm": 0.6960654258728027, |
|
"learning_rate": 0.0009437037037037038, |
|
"loss": 1.4398, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 26.44, |
|
"grad_norm": 0.7060914039611816, |
|
"learning_rate": 0.0009422222222222222, |
|
"loss": 1.4417, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 26.48, |
|
"grad_norm": 0.7186646461486816, |
|
"learning_rate": 0.0009407407407407408, |
|
"loss": 1.4622, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 26.52, |
|
"grad_norm": 0.7396743893623352, |
|
"learning_rate": 0.0009392592592592593, |
|
"loss": 1.471, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 26.56, |
|
"grad_norm": 0.6751599311828613, |
|
"learning_rate": 0.0009377777777777778, |
|
"loss": 1.4557, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 26.59, |
|
"grad_norm": 0.6983835697174072, |
|
"learning_rate": 0.0009362962962962963, |
|
"loss": 1.4358, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 26.63, |
|
"grad_norm": 0.6938909292221069, |
|
"learning_rate": 0.0009348148148148148, |
|
"loss": 1.4836, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 26.67, |
|
"grad_norm": 0.6815834641456604, |
|
"learning_rate": 0.0009333333333333333, |
|
"loss": 1.4937, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 26.7, |
|
"grad_norm": 0.699643611907959, |
|
"learning_rate": 0.0009318518518518518, |
|
"loss": 1.5198, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 26.74, |
|
"grad_norm": 0.6944101452827454, |
|
"learning_rate": 0.0009303703703703705, |
|
"loss": 1.4792, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 26.78, |
|
"grad_norm": 0.7341158390045166, |
|
"learning_rate": 0.0009288888888888889, |
|
"loss": 1.5256, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 26.81, |
|
"grad_norm": 0.6798523664474487, |
|
"learning_rate": 0.0009274074074074075, |
|
"loss": 1.5082, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 26.85, |
|
"grad_norm": 0.696607768535614, |
|
"learning_rate": 0.000925925925925926, |
|
"loss": 1.5197, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 26.89, |
|
"grad_norm": 0.6994819641113281, |
|
"learning_rate": 0.0009244444444444444, |
|
"loss": 1.4907, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 26.93, |
|
"grad_norm": 0.6782405972480774, |
|
"learning_rate": 0.000922962962962963, |
|
"loss": 1.5202, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 26.96, |
|
"grad_norm": 0.701532781124115, |
|
"learning_rate": 0.0009214814814814815, |
|
"loss": 1.5186, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"grad_norm": 1.0853149890899658, |
|
"learning_rate": 0.00092, |
|
"loss": 1.5585, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 27.04, |
|
"grad_norm": 0.6549865007400513, |
|
"learning_rate": 0.0009185185185185185, |
|
"loss": 1.2778, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 27.07, |
|
"grad_norm": 0.7133400440216064, |
|
"learning_rate": 0.000917037037037037, |
|
"loss": 1.2791, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 27.11, |
|
"grad_norm": 0.7217587828636169, |
|
"learning_rate": 0.0009155555555555556, |
|
"loss": 1.3171, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 27.15, |
|
"grad_norm": 0.6442558169364929, |
|
"learning_rate": 0.0009140740740740741, |
|
"loss": 1.3219, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 27.19, |
|
"grad_norm": 0.737054705619812, |
|
"learning_rate": 0.0009125925925925927, |
|
"loss": 1.3253, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 27.22, |
|
"grad_norm": 0.7420579195022583, |
|
"learning_rate": 0.0009111111111111111, |
|
"loss": 1.3616, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 27.26, |
|
"grad_norm": 0.745717465877533, |
|
"learning_rate": 0.0009096296296296297, |
|
"loss": 1.3606, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"grad_norm": 0.7737590074539185, |
|
"learning_rate": 0.0009081481481481482, |
|
"loss": 1.3716, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 27.33, |
|
"grad_norm": 0.7196997404098511, |
|
"learning_rate": 0.0009066666666666666, |
|
"loss": 1.3784, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 27.37, |
|
"grad_norm": 0.6725694537162781, |
|
"learning_rate": 0.0009051851851851852, |
|
"loss": 1.4092, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 27.41, |
|
"grad_norm": 0.7940300703048706, |
|
"learning_rate": 0.0009037037037037037, |
|
"loss": 1.4181, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 27.44, |
|
"grad_norm": 0.7204284071922302, |
|
"learning_rate": 0.0009022222222222222, |
|
"loss": 1.4147, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 27.48, |
|
"grad_norm": 0.6743030548095703, |
|
"learning_rate": 0.0009007407407407408, |
|
"loss": 1.4266, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 27.52, |
|
"grad_norm": 0.6738618612289429, |
|
"learning_rate": 0.0008992592592592594, |
|
"loss": 1.4105, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 27.56, |
|
"grad_norm": 0.7223931550979614, |
|
"learning_rate": 0.0008977777777777778, |
|
"loss": 1.4684, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 27.59, |
|
"grad_norm": 0.7275183200836182, |
|
"learning_rate": 0.0008962962962962963, |
|
"loss": 1.4572, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 27.63, |
|
"grad_norm": 0.7375164031982422, |
|
"learning_rate": 0.0008948148148148149, |
|
"loss": 1.4505, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 27.67, |
|
"grad_norm": 0.6762166023254395, |
|
"learning_rate": 0.0008933333333333333, |
|
"loss": 1.4677, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 27.7, |
|
"grad_norm": 0.7304455041885376, |
|
"learning_rate": 0.0008918518518518519, |
|
"loss": 1.4723, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 27.74, |
|
"grad_norm": 0.7225548028945923, |
|
"learning_rate": 0.0008903703703703704, |
|
"loss": 1.4739, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 27.78, |
|
"grad_norm": 0.7559143304824829, |
|
"learning_rate": 0.0008888888888888888, |
|
"loss": 1.4707, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 27.81, |
|
"grad_norm": 0.770778238773346, |
|
"learning_rate": 0.0008874074074074074, |
|
"loss": 1.4737, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 27.85, |
|
"grad_norm": 0.7308626770973206, |
|
"learning_rate": 0.000885925925925926, |
|
"loss": 1.4682, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 27.89, |
|
"grad_norm": 0.6600290536880493, |
|
"learning_rate": 0.0008844444444444445, |
|
"loss": 1.4645, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 27.93, |
|
"grad_norm": 0.7288909554481506, |
|
"learning_rate": 0.000882962962962963, |
|
"loss": 1.4978, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 27.96, |
|
"grad_norm": 0.7489798069000244, |
|
"learning_rate": 0.0008814814814814816, |
|
"loss": 1.4994, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"grad_norm": 1.4726680517196655, |
|
"learning_rate": 0.00088, |
|
"loss": 1.5065, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 28.04, |
|
"grad_norm": 0.7046101093292236, |
|
"learning_rate": 0.0008785185185185185, |
|
"loss": 1.2712, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 28.07, |
|
"grad_norm": 0.6764820218086243, |
|
"learning_rate": 0.0008770370370370371, |
|
"loss": 1.2717, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 28.11, |
|
"grad_norm": 0.713107705116272, |
|
"learning_rate": 0.0008755555555555555, |
|
"loss": 1.2657, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 28.15, |
|
"grad_norm": 0.7514472603797913, |
|
"learning_rate": 0.0008740740740740741, |
|
"loss": 1.2947, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 28.19, |
|
"grad_norm": 0.7262618541717529, |
|
"learning_rate": 0.0008725925925925926, |
|
"loss": 1.3297, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 28.22, |
|
"grad_norm": 0.8503907322883606, |
|
"learning_rate": 0.000871111111111111, |
|
"loss": 1.2874, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 28.26, |
|
"grad_norm": 0.7442007064819336, |
|
"learning_rate": 0.0008696296296296297, |
|
"loss": 1.3066, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 28.3, |
|
"grad_norm": 0.6790578365325928, |
|
"learning_rate": 0.0008681481481481482, |
|
"loss": 1.3362, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 28.33, |
|
"grad_norm": 0.7511619925498962, |
|
"learning_rate": 0.0008666666666666667, |
|
"loss": 1.3415, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 28.37, |
|
"grad_norm": 0.7373079657554626, |
|
"learning_rate": 0.0008651851851851852, |
|
"loss": 1.3483, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 28.41, |
|
"grad_norm": 0.7537468075752258, |
|
"learning_rate": 0.0008637037037037038, |
|
"loss": 1.3976, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 28.44, |
|
"grad_norm": 0.758500337600708, |
|
"learning_rate": 0.0008622222222222222, |
|
"loss": 1.3929, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 28.48, |
|
"grad_norm": 0.7068962454795837, |
|
"learning_rate": 0.0008607407407407407, |
|
"loss": 1.3989, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 28.52, |
|
"grad_norm": 0.7316468954086304, |
|
"learning_rate": 0.0008592592592592593, |
|
"loss": 1.4046, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 28.56, |
|
"grad_norm": 0.7010143995285034, |
|
"learning_rate": 0.0008577777777777777, |
|
"loss": 1.4199, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 28.59, |
|
"grad_norm": 0.7400304675102234, |
|
"learning_rate": 0.0008562962962962963, |
|
"loss": 1.4107, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 28.63, |
|
"grad_norm": 0.7666124105453491, |
|
"learning_rate": 0.0008548148148148149, |
|
"loss": 1.4561, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 28.67, |
|
"grad_norm": 0.7306181192398071, |
|
"learning_rate": 0.0008533333333333334, |
|
"loss": 1.4353, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 28.7, |
|
"grad_norm": 0.7335529327392578, |
|
"learning_rate": 0.0008518518518518519, |
|
"loss": 1.4214, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 28.74, |
|
"grad_norm": 0.7898188829421997, |
|
"learning_rate": 0.0008503703703703704, |
|
"loss": 1.4253, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 28.78, |
|
"grad_norm": 0.7438576817512512, |
|
"learning_rate": 0.0008488888888888889, |
|
"loss": 1.4307, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 28.81, |
|
"grad_norm": 0.7355637550354004, |
|
"learning_rate": 0.0008474074074074074, |
|
"loss": 1.4224, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 28.85, |
|
"grad_norm": 0.756335973739624, |
|
"learning_rate": 0.000845925925925926, |
|
"loss": 1.4869, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 28.89, |
|
"grad_norm": 0.6840737462043762, |
|
"learning_rate": 0.0008444444444444444, |
|
"loss": 1.4612, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 28.93, |
|
"grad_norm": 0.7832042574882507, |
|
"learning_rate": 0.0008429629629629629, |
|
"loss": 1.4442, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 28.96, |
|
"grad_norm": 0.7849950194358826, |
|
"learning_rate": 0.0008414814814814815, |
|
"loss": 1.4796, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"grad_norm": 1.6340038776397705, |
|
"learning_rate": 0.00084, |
|
"loss": 1.4447, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 29.04, |
|
"grad_norm": 0.7272326350212097, |
|
"learning_rate": 0.0008385185185185186, |
|
"loss": 1.2243, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 29.07, |
|
"grad_norm": 0.796206533908844, |
|
"learning_rate": 0.0008370370370370371, |
|
"loss": 1.2332, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 29.11, |
|
"grad_norm": 0.7212488651275635, |
|
"learning_rate": 0.0008355555555555556, |
|
"loss": 1.2729, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 29.15, |
|
"grad_norm": 0.7437489032745361, |
|
"learning_rate": 0.0008340740740740741, |
|
"loss": 1.2607, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 29.19, |
|
"grad_norm": 0.7367982864379883, |
|
"learning_rate": 0.0008325925925925926, |
|
"loss": 1.2863, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 29.22, |
|
"grad_norm": 0.688751220703125, |
|
"learning_rate": 0.0008311111111111111, |
|
"loss": 1.302, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 29.26, |
|
"grad_norm": 0.7412949800491333, |
|
"learning_rate": 0.0008296296296296296, |
|
"loss": 1.2973, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 29.3, |
|
"grad_norm": 0.7355971336364746, |
|
"learning_rate": 0.0008281481481481482, |
|
"loss": 1.3144, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 29.33, |
|
"grad_norm": 0.7711012363433838, |
|
"learning_rate": 0.0008266666666666666, |
|
"loss": 1.3428, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 29.37, |
|
"grad_norm": 0.7146042585372925, |
|
"learning_rate": 0.0008251851851851852, |
|
"loss": 1.3259, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 29.41, |
|
"grad_norm": 0.7842440605163574, |
|
"learning_rate": 0.0008237037037037038, |
|
"loss": 1.3353, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 29.44, |
|
"grad_norm": 0.7185430526733398, |
|
"learning_rate": 0.0008222222222222222, |
|
"loss": 1.3476, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 29.48, |
|
"grad_norm": 0.7307253479957581, |
|
"learning_rate": 0.0008207407407407408, |
|
"loss": 1.3576, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 29.52, |
|
"grad_norm": 0.7476086020469666, |
|
"learning_rate": 0.0008192592592592593, |
|
"loss": 1.3491, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 29.56, |
|
"grad_norm": 0.773776113986969, |
|
"learning_rate": 0.0008177777777777778, |
|
"loss": 1.3646, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 29.59, |
|
"grad_norm": 0.7761340141296387, |
|
"learning_rate": 0.0008162962962962963, |
|
"loss": 1.383, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 29.63, |
|
"grad_norm": 0.7412744760513306, |
|
"learning_rate": 0.0008148148148148148, |
|
"loss": 1.4269, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 29.67, |
|
"grad_norm": 0.7098486423492432, |
|
"learning_rate": 0.0008133333333333333, |
|
"loss": 1.3804, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 29.7, |
|
"grad_norm": 0.7861642241477966, |
|
"learning_rate": 0.0008118518518518518, |
|
"loss": 1.4102, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 29.74, |
|
"grad_norm": 0.7670470476150513, |
|
"learning_rate": 0.0008103703703703705, |
|
"loss": 1.3886, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 29.78, |
|
"grad_norm": 0.840653121471405, |
|
"learning_rate": 0.0008088888888888889, |
|
"loss": 1.4109, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 29.81, |
|
"grad_norm": 0.7671210169792175, |
|
"learning_rate": 0.0008074074074074075, |
|
"loss": 1.4307, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 29.85, |
|
"grad_norm": 0.7228606343269348, |
|
"learning_rate": 0.000805925925925926, |
|
"loss": 1.4196, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 29.89, |
|
"grad_norm": 0.6415588855743408, |
|
"learning_rate": 0.0008044444444444444, |
|
"loss": 1.421, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 29.93, |
|
"grad_norm": 0.780892014503479, |
|
"learning_rate": 0.000802962962962963, |
|
"loss": 1.439, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 29.96, |
|
"grad_norm": 0.7971543073654175, |
|
"learning_rate": 0.0008014814814814815, |
|
"loss": 1.425, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"grad_norm": 1.307153344154358, |
|
"learning_rate": 0.0008, |
|
"loss": 1.4416, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 30.04, |
|
"grad_norm": 0.7271667122840881, |
|
"learning_rate": 0.0007985185185185185, |
|
"loss": 1.1745, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 30.07, |
|
"grad_norm": 0.7366812825202942, |
|
"learning_rate": 0.000797037037037037, |
|
"loss": 1.2231, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 30.11, |
|
"grad_norm": 0.8225249648094177, |
|
"learning_rate": 0.0007955555555555555, |
|
"loss": 1.2032, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 30.15, |
|
"grad_norm": 0.7685932517051697, |
|
"learning_rate": 0.0007940740740740741, |
|
"loss": 1.2319, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 30.19, |
|
"grad_norm": 0.6946758031845093, |
|
"learning_rate": 0.0007925925925925927, |
|
"loss": 1.2392, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 30.22, |
|
"grad_norm": 0.7242510318756104, |
|
"learning_rate": 0.0007911111111111111, |
|
"loss": 1.2644, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 30.26, |
|
"grad_norm": 0.7538670301437378, |
|
"learning_rate": 0.0007896296296296297, |
|
"loss": 1.285, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 30.3, |
|
"grad_norm": 0.7838382720947266, |
|
"learning_rate": 0.0007881481481481482, |
|
"loss": 1.2723, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 30.33, |
|
"grad_norm": 0.7739988565444946, |
|
"learning_rate": 0.0007866666666666666, |
|
"loss": 1.288, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 30.37, |
|
"grad_norm": 0.758450448513031, |
|
"learning_rate": 0.0007851851851851852, |
|
"loss": 1.3066, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 30.41, |
|
"grad_norm": 0.7945404052734375, |
|
"learning_rate": 0.0007837037037037037, |
|
"loss": 1.3066, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 30.44, |
|
"grad_norm": 0.8125270009040833, |
|
"learning_rate": 0.0007822222222222222, |
|
"loss": 1.3481, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 30.48, |
|
"grad_norm": 0.7620575428009033, |
|
"learning_rate": 0.0007807407407407407, |
|
"loss": 1.3177, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 30.52, |
|
"grad_norm": 0.7492615580558777, |
|
"learning_rate": 0.0007792592592592593, |
|
"loss": 1.3379, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 30.56, |
|
"grad_norm": 0.7885991334915161, |
|
"learning_rate": 0.0007777777777777778, |
|
"loss": 1.3671, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 30.59, |
|
"grad_norm": 0.8088184595108032, |
|
"learning_rate": 0.0007762962962962963, |
|
"loss": 1.3576, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 30.63, |
|
"grad_norm": 0.7370725274085999, |
|
"learning_rate": 0.0007748148148148149, |
|
"loss": 1.3592, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 30.67, |
|
"grad_norm": 0.8647828102111816, |
|
"learning_rate": 0.0007733333333333333, |
|
"loss": 1.3471, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 30.7, |
|
"grad_norm": 0.8166521787643433, |
|
"learning_rate": 0.0007718518518518519, |
|
"loss": 1.3605, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 30.74, |
|
"grad_norm": 0.7414300441741943, |
|
"learning_rate": 0.0007703703703703704, |
|
"loss": 1.3433, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 30.78, |
|
"grad_norm": 0.8628682494163513, |
|
"learning_rate": 0.0007688888888888888, |
|
"loss": 1.3999, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 30.81, |
|
"grad_norm": 0.7411145567893982, |
|
"learning_rate": 0.0007674074074074074, |
|
"loss": 1.3844, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 30.85, |
|
"grad_norm": 0.7552719116210938, |
|
"learning_rate": 0.0007659259259259259, |
|
"loss": 1.4042, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 30.89, |
|
"grad_norm": 0.7058090567588806, |
|
"learning_rate": 0.0007644444444444445, |
|
"loss": 1.4025, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 30.93, |
|
"grad_norm": 0.7776255011558533, |
|
"learning_rate": 0.000762962962962963, |
|
"loss": 1.3931, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 30.96, |
|
"grad_norm": 0.7906094193458557, |
|
"learning_rate": 0.0007614814814814816, |
|
"loss": 1.4201, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"grad_norm": 1.9872336387634277, |
|
"learning_rate": 0.00076, |
|
"loss": 1.4128, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 31.04, |
|
"grad_norm": 0.7633602619171143, |
|
"learning_rate": 0.0007585185185185185, |
|
"loss": 1.1678, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 31.07, |
|
"grad_norm": 0.7451400756835938, |
|
"learning_rate": 0.0007570370370370371, |
|
"loss": 1.1833, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 31.11, |
|
"grad_norm": 0.7801716327667236, |
|
"learning_rate": 0.0007555555555555555, |
|
"loss": 1.1863, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 31.15, |
|
"grad_norm": 0.8019985556602478, |
|
"learning_rate": 0.0007540740740740741, |
|
"loss": 1.2048, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 31.19, |
|
"grad_norm": 0.7380914092063904, |
|
"learning_rate": 0.0007525925925925926, |
|
"loss": 1.2158, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 31.22, |
|
"grad_norm": 0.8214986324310303, |
|
"learning_rate": 0.000751111111111111, |
|
"loss": 1.2368, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 31.26, |
|
"grad_norm": 0.8278571963310242, |
|
"learning_rate": 0.0007496296296296297, |
|
"loss": 1.2449, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 31.3, |
|
"grad_norm": 0.8095946907997131, |
|
"learning_rate": 0.0007481481481481482, |
|
"loss": 1.2509, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 31.33, |
|
"grad_norm": 0.701068639755249, |
|
"learning_rate": 0.0007466666666666667, |
|
"loss": 1.2798, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 31.37, |
|
"grad_norm": 0.7845342755317688, |
|
"learning_rate": 0.0007451851851851852, |
|
"loss": 1.2811, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 31.41, |
|
"grad_norm": 0.7937617301940918, |
|
"learning_rate": 0.0007437037037037038, |
|
"loss": 1.2757, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 31.44, |
|
"grad_norm": 0.8310158848762512, |
|
"learning_rate": 0.0007422222222222222, |
|
"loss": 1.2953, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 31.48, |
|
"grad_norm": 0.8204872012138367, |
|
"learning_rate": 0.0007407407407407407, |
|
"loss": 1.2792, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 31.52, |
|
"grad_norm": 0.746772050857544, |
|
"learning_rate": 0.0007392592592592593, |
|
"loss": 1.2849, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 31.56, |
|
"grad_norm": 0.7524877190589905, |
|
"learning_rate": 0.0007377777777777777, |
|
"loss": 1.3163, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 31.59, |
|
"grad_norm": 0.8254786729812622, |
|
"learning_rate": 0.0007362962962962963, |
|
"loss": 1.3039, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 31.63, |
|
"grad_norm": 0.7942031025886536, |
|
"learning_rate": 0.0007348148148148149, |
|
"loss": 1.3309, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 31.67, |
|
"grad_norm": 0.746993899345398, |
|
"learning_rate": 0.0007333333333333333, |
|
"loss": 1.3345, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 31.7, |
|
"grad_norm": 0.7152719497680664, |
|
"learning_rate": 0.0007318518518518519, |
|
"loss": 1.3433, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 31.74, |
|
"grad_norm": 0.8452175855636597, |
|
"learning_rate": 0.0007303703703703704, |
|
"loss": 1.3443, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 31.78, |
|
"grad_norm": 0.7857072949409485, |
|
"learning_rate": 0.0007288888888888889, |
|
"loss": 1.3459, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 31.81, |
|
"grad_norm": 0.8009936809539795, |
|
"learning_rate": 0.0007274074074074074, |
|
"loss": 1.3455, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 31.85, |
|
"grad_norm": 0.9043723940849304, |
|
"learning_rate": 0.000725925925925926, |
|
"loss": 1.3832, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 31.89, |
|
"grad_norm": 0.7199929356575012, |
|
"learning_rate": 0.0007244444444444444, |
|
"loss": 1.362, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 31.93, |
|
"grad_norm": 0.7644399404525757, |
|
"learning_rate": 0.0007229629629629629, |
|
"loss": 1.3514, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 31.96, |
|
"grad_norm": 0.7020518183708191, |
|
"learning_rate": 0.0007214814814814815, |
|
"loss": 1.3749, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"grad_norm": 1.4921846389770508, |
|
"learning_rate": 0.0007199999999999999, |
|
"loss": 1.405, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 32.04, |
|
"grad_norm": 0.7334415912628174, |
|
"learning_rate": 0.0007185185185185186, |
|
"loss": 1.1488, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 32.07, |
|
"grad_norm": 0.7583827376365662, |
|
"learning_rate": 0.0007170370370370371, |
|
"loss": 1.175, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 32.11, |
|
"grad_norm": 0.755535900592804, |
|
"learning_rate": 0.0007155555555555555, |
|
"loss": 1.1634, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 32.15, |
|
"grad_norm": 0.7786100506782532, |
|
"learning_rate": 0.0007140740740740741, |
|
"loss": 1.1713, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 32.19, |
|
"grad_norm": 0.7711645364761353, |
|
"learning_rate": 0.0007125925925925926, |
|
"loss": 1.1811, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 32.22, |
|
"grad_norm": 0.8045185804367065, |
|
"learning_rate": 0.0007111111111111111, |
|
"loss": 1.2022, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 32.26, |
|
"grad_norm": 0.8078054189682007, |
|
"learning_rate": 0.0007096296296296296, |
|
"loss": 1.2254, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 32.3, |
|
"grad_norm": 0.7914540767669678, |
|
"learning_rate": 0.0007081481481481482, |
|
"loss": 1.2144, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 32.33, |
|
"grad_norm": 0.7771093249320984, |
|
"learning_rate": 0.0007066666666666666, |
|
"loss": 1.2267, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 32.37, |
|
"grad_norm": 0.8438934087753296, |
|
"learning_rate": 0.0007051851851851851, |
|
"loss": 1.2378, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 32.41, |
|
"grad_norm": 0.8273505568504333, |
|
"learning_rate": 0.0007037037037037038, |
|
"loss": 1.2385, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 32.44, |
|
"grad_norm": 0.7227649688720703, |
|
"learning_rate": 0.0007022222222222222, |
|
"loss": 1.2618, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 32.48, |
|
"grad_norm": 0.7891519069671631, |
|
"learning_rate": 0.0007007407407407408, |
|
"loss": 1.2637, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 32.52, |
|
"grad_norm": 0.8562486171722412, |
|
"learning_rate": 0.0006992592592592593, |
|
"loss": 1.292, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 32.56, |
|
"grad_norm": 0.722166895866394, |
|
"learning_rate": 0.0006977777777777778, |
|
"loss": 1.2806, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 32.59, |
|
"grad_norm": 0.8187007904052734, |
|
"learning_rate": 0.0006962962962962963, |
|
"loss": 1.2906, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 32.63, |
|
"grad_norm": 0.8709007501602173, |
|
"learning_rate": 0.0006948148148148148, |
|
"loss": 1.2941, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 32.67, |
|
"grad_norm": 0.8180269002914429, |
|
"learning_rate": 0.0006933333333333333, |
|
"loss": 1.2994, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 32.7, |
|
"grad_norm": 0.8007955551147461, |
|
"learning_rate": 0.0006918518518518518, |
|
"loss": 1.2668, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 32.74, |
|
"grad_norm": 0.8287192583084106, |
|
"learning_rate": 0.0006903703703703704, |
|
"loss": 1.32, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 32.78, |
|
"grad_norm": 0.8008798956871033, |
|
"learning_rate": 0.000688888888888889, |
|
"loss": 1.3194, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 32.81, |
|
"grad_norm": 0.7793354988098145, |
|
"learning_rate": 0.0006874074074074074, |
|
"loss": 1.3138, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 32.85, |
|
"grad_norm": 0.8135443925857544, |
|
"learning_rate": 0.000685925925925926, |
|
"loss": 1.3349, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 32.89, |
|
"grad_norm": 0.8550287485122681, |
|
"learning_rate": 0.0006844444444444444, |
|
"loss": 1.358, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 32.93, |
|
"grad_norm": 0.8146428465843201, |
|
"learning_rate": 0.000682962962962963, |
|
"loss": 1.347, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 32.96, |
|
"grad_norm": 0.8575021028518677, |
|
"learning_rate": 0.0006814814814814815, |
|
"loss": 1.3261, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"grad_norm": 1.6308609247207642, |
|
"learning_rate": 0.00068, |
|
"loss": 1.3611, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 33.04, |
|
"grad_norm": 0.7819394469261169, |
|
"learning_rate": 0.0006785185185185185, |
|
"loss": 1.1404, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 33.07, |
|
"grad_norm": 0.7906447052955627, |
|
"learning_rate": 0.000677037037037037, |
|
"loss": 1.1216, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 33.11, |
|
"grad_norm": 0.7358431816101074, |
|
"learning_rate": 0.0006755555555555555, |
|
"loss": 1.1406, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 33.15, |
|
"grad_norm": 0.778339147567749, |
|
"learning_rate": 0.0006740740740740741, |
|
"loss": 1.1622, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 33.19, |
|
"grad_norm": 0.7508297562599182, |
|
"learning_rate": 0.0006725925925925927, |
|
"loss": 1.1778, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 33.22, |
|
"grad_norm": 0.830898106098175, |
|
"learning_rate": 0.0006711111111111111, |
|
"loss": 1.1648, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 33.26, |
|
"grad_norm": 0.8099901080131531, |
|
"learning_rate": 0.0006696296296296296, |
|
"loss": 1.1866, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 33.3, |
|
"grad_norm": 0.8449798822402954, |
|
"learning_rate": 0.0006681481481481482, |
|
"loss": 1.1964, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"grad_norm": 0.7969886660575867, |
|
"learning_rate": 0.0006666666666666666, |
|
"loss": 1.1974, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 33.37, |
|
"grad_norm": 0.809606671333313, |
|
"learning_rate": 0.0006651851851851852, |
|
"loss": 1.1999, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 33.41, |
|
"grad_norm": 0.8491615653038025, |
|
"learning_rate": 0.0006637037037037037, |
|
"loss": 1.2246, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 33.44, |
|
"grad_norm": 0.8338679671287537, |
|
"learning_rate": 0.0006622222222222222, |
|
"loss": 1.2387, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 33.48, |
|
"grad_norm": 0.7784556746482849, |
|
"learning_rate": 0.0006607407407407407, |
|
"loss": 1.2544, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 33.52, |
|
"grad_norm": 0.8017482757568359, |
|
"learning_rate": 0.0006592592592592592, |
|
"loss": 1.2187, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 33.56, |
|
"grad_norm": 0.8610512018203735, |
|
"learning_rate": 0.0006577777777777779, |
|
"loss": 1.239, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 33.59, |
|
"grad_norm": 0.7477060556411743, |
|
"learning_rate": 0.0006562962962962963, |
|
"loss": 1.2626, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 33.63, |
|
"grad_norm": 0.8773489594459534, |
|
"learning_rate": 0.0006548148148148149, |
|
"loss": 1.2678, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 33.67, |
|
"grad_norm": 0.8236984610557556, |
|
"learning_rate": 0.0006533333333333333, |
|
"loss": 1.275, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 33.7, |
|
"grad_norm": 0.799561083316803, |
|
"learning_rate": 0.0006518518518518519, |
|
"loss": 1.273, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 33.74, |
|
"grad_norm": 0.8354243040084839, |
|
"learning_rate": 0.0006503703703703704, |
|
"loss": 1.2556, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 33.78, |
|
"grad_norm": 0.7642504572868347, |
|
"learning_rate": 0.0006488888888888888, |
|
"loss": 1.2839, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 33.81, |
|
"grad_norm": 0.8058938384056091, |
|
"learning_rate": 0.0006474074074074074, |
|
"loss": 1.2844, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 33.85, |
|
"grad_norm": 0.8240184187889099, |
|
"learning_rate": 0.0006459259259259259, |
|
"loss": 1.2444, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 33.89, |
|
"grad_norm": 0.7965198755264282, |
|
"learning_rate": 0.0006444444444444444, |
|
"loss": 1.302, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 33.93, |
|
"grad_norm": 0.807142436504364, |
|
"learning_rate": 0.000642962962962963, |
|
"loss": 1.2898, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 33.96, |
|
"grad_norm": 0.8767658472061157, |
|
"learning_rate": 0.0006414814814814815, |
|
"loss": 1.3294, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"grad_norm": 1.232200264930725, |
|
"learning_rate": 0.00064, |
|
"loss": 1.3133, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 34.04, |
|
"grad_norm": 0.7402983903884888, |
|
"learning_rate": 0.0006385185185185185, |
|
"loss": 1.0779, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 34.07, |
|
"grad_norm": 0.8266010880470276, |
|
"learning_rate": 0.0006370370370370371, |
|
"loss": 1.1209, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 34.11, |
|
"grad_norm": 0.7701414823532104, |
|
"learning_rate": 0.0006355555555555555, |
|
"loss": 1.1255, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 34.15, |
|
"grad_norm": 0.7860375642776489, |
|
"learning_rate": 0.0006340740740740741, |
|
"loss": 1.1165, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 34.19, |
|
"grad_norm": 0.8956103920936584, |
|
"learning_rate": 0.0006325925925925926, |
|
"loss": 1.0995, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 34.22, |
|
"grad_norm": 0.7913826107978821, |
|
"learning_rate": 0.000631111111111111, |
|
"loss": 1.1349, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 34.26, |
|
"grad_norm": 0.8237706422805786, |
|
"learning_rate": 0.0006296296296296296, |
|
"loss": 1.1396, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 34.3, |
|
"grad_norm": 0.7774602770805359, |
|
"learning_rate": 0.0006281481481481482, |
|
"loss": 1.162, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 34.33, |
|
"grad_norm": 0.8474662899971008, |
|
"learning_rate": 0.0006266666666666668, |
|
"loss": 1.1747, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 34.37, |
|
"grad_norm": 0.9087252616882324, |
|
"learning_rate": 0.0006251851851851852, |
|
"loss": 1.1785, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 34.41, |
|
"grad_norm": 0.8514711260795593, |
|
"learning_rate": 0.0006237037037037037, |
|
"loss": 1.2354, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 34.44, |
|
"grad_norm": 0.8436718583106995, |
|
"learning_rate": 0.0006222222222222223, |
|
"loss": 1.1793, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 34.48, |
|
"grad_norm": 0.9170453548431396, |
|
"learning_rate": 0.0006207407407407407, |
|
"loss": 1.1807, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 34.52, |
|
"grad_norm": 0.9778527021408081, |
|
"learning_rate": 0.0006192592592592593, |
|
"loss": 1.2231, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 34.56, |
|
"grad_norm": 0.8001771569252014, |
|
"learning_rate": 0.0006177777777777777, |
|
"loss": 1.2107, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 34.59, |
|
"grad_norm": 0.7859574556350708, |
|
"learning_rate": 0.0006162962962962963, |
|
"loss": 1.2242, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 34.63, |
|
"grad_norm": 0.8739088773727417, |
|
"learning_rate": 0.0006148148148148148, |
|
"loss": 1.2294, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 34.67, |
|
"grad_norm": 0.8462000489234924, |
|
"learning_rate": 0.0006133333333333334, |
|
"loss": 1.2335, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 34.7, |
|
"grad_norm": 0.9332739114761353, |
|
"learning_rate": 0.0006118518518518519, |
|
"loss": 1.2425, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 34.74, |
|
"grad_norm": 0.8226819038391113, |
|
"learning_rate": 0.0006103703703703704, |
|
"loss": 1.255, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 34.78, |
|
"grad_norm": 0.8331789374351501, |
|
"learning_rate": 0.000608888888888889, |
|
"loss": 1.2433, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 34.81, |
|
"grad_norm": 0.8723573684692383, |
|
"learning_rate": 0.0006074074074074074, |
|
"loss": 1.2628, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 34.85, |
|
"grad_norm": 0.8315209746360779, |
|
"learning_rate": 0.000605925925925926, |
|
"loss": 1.2484, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 34.89, |
|
"grad_norm": 0.8821229338645935, |
|
"learning_rate": 0.0006044444444444445, |
|
"loss": 1.2668, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 34.93, |
|
"grad_norm": 0.8588849306106567, |
|
"learning_rate": 0.0006029629629629629, |
|
"loss": 1.2643, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 34.96, |
|
"grad_norm": 0.8674469590187073, |
|
"learning_rate": 0.0006014814814814815, |
|
"loss": 1.2944, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"grad_norm": 1.8903077840805054, |
|
"learning_rate": 0.0006, |
|
"loss": 1.2972, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 35.04, |
|
"grad_norm": 0.7540388107299805, |
|
"learning_rate": 0.0005985185185185186, |
|
"loss": 1.0662, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 35.07, |
|
"grad_norm": 0.8312435150146484, |
|
"learning_rate": 0.0005970370370370371, |
|
"loss": 1.0752, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 35.11, |
|
"grad_norm": 0.8651759624481201, |
|
"learning_rate": 0.0005955555555555556, |
|
"loss": 1.0925, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 35.15, |
|
"grad_norm": 0.7402911186218262, |
|
"learning_rate": 0.0005940740740740741, |
|
"loss": 1.0936, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 35.19, |
|
"grad_norm": 0.7958086133003235, |
|
"learning_rate": 0.0005925925925925926, |
|
"loss": 1.0865, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 35.22, |
|
"grad_norm": 0.773444652557373, |
|
"learning_rate": 0.0005911111111111112, |
|
"loss": 1.1231, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 35.26, |
|
"grad_norm": 0.8964213132858276, |
|
"learning_rate": 0.0005896296296296296, |
|
"loss": 1.1239, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 35.3, |
|
"grad_norm": 0.8228880167007446, |
|
"learning_rate": 0.0005881481481481482, |
|
"loss": 1.1283, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 35.33, |
|
"grad_norm": 0.8290715217590332, |
|
"learning_rate": 0.0005866666666666667, |
|
"loss": 1.1549, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 35.37, |
|
"grad_norm": 0.7984841465950012, |
|
"learning_rate": 0.0005851851851851851, |
|
"loss": 1.1152, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 35.41, |
|
"grad_norm": 0.9492020606994629, |
|
"learning_rate": 0.0005837037037037037, |
|
"loss": 1.1375, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 35.44, |
|
"grad_norm": 0.9132887721061707, |
|
"learning_rate": 0.0005822222222222223, |
|
"loss": 1.1848, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 35.48, |
|
"grad_norm": 0.9160571098327637, |
|
"learning_rate": 0.0005807407407407408, |
|
"loss": 1.1619, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 35.52, |
|
"grad_norm": 0.8078466057777405, |
|
"learning_rate": 0.0005792592592592593, |
|
"loss": 1.217, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 35.56, |
|
"grad_norm": 0.8517169952392578, |
|
"learning_rate": 0.0005777777777777778, |
|
"loss": 1.1829, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 35.59, |
|
"grad_norm": 0.8791046142578125, |
|
"learning_rate": 0.0005762962962962963, |
|
"loss": 1.1896, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 35.63, |
|
"grad_norm": 0.9342435002326965, |
|
"learning_rate": 0.0005748148148148148, |
|
"loss": 1.2028, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 35.67, |
|
"grad_norm": 0.884878933429718, |
|
"learning_rate": 0.0005733333333333334, |
|
"loss": 1.202, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 35.7, |
|
"grad_norm": 0.8292385935783386, |
|
"learning_rate": 0.0005718518518518518, |
|
"loss": 1.2376, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 35.74, |
|
"grad_norm": 0.8106871843338013, |
|
"learning_rate": 0.0005703703703703704, |
|
"loss": 1.1888, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 35.78, |
|
"grad_norm": 0.8425321578979492, |
|
"learning_rate": 0.0005688888888888889, |
|
"loss": 1.2282, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 35.81, |
|
"grad_norm": 0.8284263610839844, |
|
"learning_rate": 0.0005674074074074074, |
|
"loss": 1.2185, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 35.85, |
|
"grad_norm": 0.8811337947845459, |
|
"learning_rate": 0.000565925925925926, |
|
"loss": 1.2189, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 35.89, |
|
"grad_norm": 0.8189839124679565, |
|
"learning_rate": 0.0005644444444444445, |
|
"loss": 1.2378, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 35.93, |
|
"grad_norm": 0.9154495000839233, |
|
"learning_rate": 0.000562962962962963, |
|
"loss": 1.2155, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 35.96, |
|
"grad_norm": 0.84986412525177, |
|
"learning_rate": 0.0005614814814814815, |
|
"loss": 1.2282, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"grad_norm": 1.4053990840911865, |
|
"learning_rate": 0.0005600000000000001, |
|
"loss": 1.2341, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 36.04, |
|
"grad_norm": 0.8471843004226685, |
|
"learning_rate": 0.0005585185185185185, |
|
"loss": 1.0177, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 36.07, |
|
"grad_norm": 0.8182628154754639, |
|
"learning_rate": 0.000557037037037037, |
|
"loss": 1.0433, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 36.11, |
|
"grad_norm": 0.8502013087272644, |
|
"learning_rate": 0.0005555555555555556, |
|
"loss": 1.102, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 36.15, |
|
"grad_norm": 0.8067908883094788, |
|
"learning_rate": 0.000554074074074074, |
|
"loss": 1.0811, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 36.19, |
|
"grad_norm": 0.8608354926109314, |
|
"learning_rate": 0.0005525925925925927, |
|
"loss": 1.0716, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 36.22, |
|
"grad_norm": 0.8624645471572876, |
|
"learning_rate": 0.0005511111111111112, |
|
"loss": 1.0711, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 36.26, |
|
"grad_norm": 0.8814002871513367, |
|
"learning_rate": 0.0005496296296296296, |
|
"loss": 1.0988, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 36.3, |
|
"grad_norm": 0.796582043170929, |
|
"learning_rate": 0.0005481481481481482, |
|
"loss": 1.0714, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 36.33, |
|
"grad_norm": 0.8638683557510376, |
|
"learning_rate": 0.0005466666666666667, |
|
"loss": 1.1021, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 36.37, |
|
"grad_norm": 1.0243921279907227, |
|
"learning_rate": 0.0005451851851851852, |
|
"loss": 1.1143, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 36.41, |
|
"grad_norm": 0.9122822284698486, |
|
"learning_rate": 0.0005437037037037037, |
|
"loss": 1.1485, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 36.44, |
|
"grad_norm": 0.7532286047935486, |
|
"learning_rate": 0.0005422222222222223, |
|
"loss": 1.1323, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 36.48, |
|
"grad_norm": 0.8187335729598999, |
|
"learning_rate": 0.0005407407407407407, |
|
"loss": 1.133, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 36.52, |
|
"grad_norm": 0.8526188135147095, |
|
"learning_rate": 0.0005392592592592592, |
|
"loss": 1.1739, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 36.56, |
|
"grad_norm": 0.8166966438293457, |
|
"learning_rate": 0.0005377777777777779, |
|
"loss": 1.1633, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 36.59, |
|
"grad_norm": 0.8333300352096558, |
|
"learning_rate": 0.0005362962962962963, |
|
"loss": 1.1226, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 36.63, |
|
"grad_norm": 0.8543572425842285, |
|
"learning_rate": 0.0005348148148148149, |
|
"loss": 1.1745, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 36.67, |
|
"grad_norm": 0.8492584824562073, |
|
"learning_rate": 0.0005333333333333334, |
|
"loss": 1.1747, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 36.7, |
|
"grad_norm": 0.8231162428855896, |
|
"learning_rate": 0.0005318518518518518, |
|
"loss": 1.1825, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 36.74, |
|
"grad_norm": 0.8251094222068787, |
|
"learning_rate": 0.0005303703703703704, |
|
"loss": 1.1943, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 36.78, |
|
"grad_norm": 0.8481724262237549, |
|
"learning_rate": 0.0005288888888888889, |
|
"loss": 1.1725, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 36.81, |
|
"grad_norm": 0.9228334426879883, |
|
"learning_rate": 0.0005274074074074074, |
|
"loss": 1.1736, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 36.85, |
|
"grad_norm": 0.8884844779968262, |
|
"learning_rate": 0.0005259259259259259, |
|
"loss": 1.1856, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 36.89, |
|
"grad_norm": 0.8618705868721008, |
|
"learning_rate": 0.0005244444444444445, |
|
"loss": 1.2202, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 36.93, |
|
"grad_norm": 1.0046896934509277, |
|
"learning_rate": 0.0005229629629629629, |
|
"loss": 1.2043, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 36.96, |
|
"grad_norm": 0.8208348155021667, |
|
"learning_rate": 0.0005214814814814815, |
|
"loss": 1.1784, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"grad_norm": 1.7133288383483887, |
|
"learning_rate": 0.0005200000000000001, |
|
"loss": 1.1988, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 37.04, |
|
"grad_norm": 0.8276928663253784, |
|
"learning_rate": 0.0005185185185185185, |
|
"loss": 0.9958, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 37.07, |
|
"grad_norm": 0.8038898706436157, |
|
"learning_rate": 0.0005170370370370371, |
|
"loss": 1.0148, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 37.11, |
|
"grad_norm": 0.8378689885139465, |
|
"learning_rate": 0.0005155555555555556, |
|
"loss": 1.0242, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 37.15, |
|
"grad_norm": 0.826815664768219, |
|
"learning_rate": 0.0005140740740740741, |
|
"loss": 1.0133, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 37.19, |
|
"grad_norm": 0.8740249872207642, |
|
"learning_rate": 0.0005125925925925926, |
|
"loss": 1.0245, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 37.22, |
|
"grad_norm": 0.7812831401824951, |
|
"learning_rate": 0.0005111111111111111, |
|
"loss": 1.0451, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 37.26, |
|
"grad_norm": 0.8525770306587219, |
|
"learning_rate": 0.0005096296296296296, |
|
"loss": 1.0525, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 37.3, |
|
"grad_norm": 0.8188793063163757, |
|
"learning_rate": 0.0005081481481481481, |
|
"loss": 1.079, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 37.33, |
|
"grad_norm": 0.8820340633392334, |
|
"learning_rate": 0.0005066666666666668, |
|
"loss": 1.0808, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 37.37, |
|
"grad_norm": 0.9423266649246216, |
|
"learning_rate": 0.0005051851851851852, |
|
"loss": 1.0786, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 37.41, |
|
"grad_norm": 0.8830150365829468, |
|
"learning_rate": 0.0005037037037037037, |
|
"loss": 1.107, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 37.44, |
|
"grad_norm": 0.9641768932342529, |
|
"learning_rate": 0.0005022222222222223, |
|
"loss": 1.0972, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 37.48, |
|
"grad_norm": 0.8334240317344666, |
|
"learning_rate": 0.0005007407407407407, |
|
"loss": 1.1229, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 37.52, |
|
"grad_norm": 0.8352447748184204, |
|
"learning_rate": 0.0004992592592592593, |
|
"loss": 1.1144, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 37.56, |
|
"grad_norm": 0.834314227104187, |
|
"learning_rate": 0.0004977777777777778, |
|
"loss": 1.1161, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 37.59, |
|
"grad_norm": 0.8779057860374451, |
|
"learning_rate": 0.0004962962962962963, |
|
"loss": 1.133, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 37.63, |
|
"grad_norm": 0.8864279389381409, |
|
"learning_rate": 0.0004948148148148148, |
|
"loss": 1.1581, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 37.67, |
|
"grad_norm": 0.8707226514816284, |
|
"learning_rate": 0.0004933333333333334, |
|
"loss": 1.135, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 37.7, |
|
"grad_norm": 0.8655386567115784, |
|
"learning_rate": 0.0004918518518518519, |
|
"loss": 1.1366, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 37.74, |
|
"grad_norm": 0.9207199811935425, |
|
"learning_rate": 0.0004903703703703704, |
|
"loss": 1.1788, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 37.78, |
|
"grad_norm": 0.8524510264396667, |
|
"learning_rate": 0.0004888888888888889, |
|
"loss": 1.1285, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 37.81, |
|
"grad_norm": 0.8669392466545105, |
|
"learning_rate": 0.00048740740740740743, |
|
"loss": 1.1691, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 37.85, |
|
"grad_norm": 0.8475798964500427, |
|
"learning_rate": 0.00048592592592592595, |
|
"loss": 1.1825, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 37.89, |
|
"grad_norm": 0.8444201350212097, |
|
"learning_rate": 0.00048444444444444446, |
|
"loss": 1.1717, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 37.93, |
|
"grad_norm": 0.9086614847183228, |
|
"learning_rate": 0.000482962962962963, |
|
"loss": 1.1652, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 37.96, |
|
"grad_norm": 0.9066885709762573, |
|
"learning_rate": 0.00048148148148148144, |
|
"loss": 1.1702, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"grad_norm": 1.5993976593017578, |
|
"learning_rate": 0.00048, |
|
"loss": 1.1431, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 38.04, |
|
"grad_norm": 0.8458494544029236, |
|
"learning_rate": 0.00047851851851851853, |
|
"loss": 0.9829, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 38.07, |
|
"grad_norm": 0.8219654560089111, |
|
"learning_rate": 0.00047703703703703705, |
|
"loss": 1.0083, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 38.11, |
|
"grad_norm": 0.9130375385284424, |
|
"learning_rate": 0.00047555555555555556, |
|
"loss": 0.9749, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 38.15, |
|
"grad_norm": 0.8476659655570984, |
|
"learning_rate": 0.0004740740740740741, |
|
"loss": 1.0061, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 38.19, |
|
"grad_norm": 0.9237126708030701, |
|
"learning_rate": 0.00047259259259259265, |
|
"loss": 1.0001, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 38.22, |
|
"grad_norm": 0.8230653405189514, |
|
"learning_rate": 0.0004711111111111111, |
|
"loss": 1.0353, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 38.26, |
|
"grad_norm": 0.8375163078308105, |
|
"learning_rate": 0.00046962962962962963, |
|
"loss": 1.0583, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 38.3, |
|
"grad_norm": 0.8810118436813354, |
|
"learning_rate": 0.00046814814814814815, |
|
"loss": 1.0576, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 38.33, |
|
"grad_norm": 0.9873121380805969, |
|
"learning_rate": 0.00046666666666666666, |
|
"loss": 1.0278, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 38.37, |
|
"grad_norm": 0.8619394898414612, |
|
"learning_rate": 0.00046518518518518523, |
|
"loss": 1.0851, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 38.41, |
|
"grad_norm": 0.8966696262359619, |
|
"learning_rate": 0.00046370370370370375, |
|
"loss": 1.0599, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 38.44, |
|
"grad_norm": 0.8234748840332031, |
|
"learning_rate": 0.0004622222222222222, |
|
"loss": 1.0872, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 38.48, |
|
"grad_norm": 0.8345742225646973, |
|
"learning_rate": 0.00046074074074074073, |
|
"loss": 1.0703, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 38.52, |
|
"grad_norm": 0.957393229007721, |
|
"learning_rate": 0.00045925925925925925, |
|
"loss": 1.0654, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 38.56, |
|
"grad_norm": 0.8456324338912964, |
|
"learning_rate": 0.0004577777777777778, |
|
"loss": 1.082, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 38.59, |
|
"grad_norm": 0.923425555229187, |
|
"learning_rate": 0.00045629629629629633, |
|
"loss": 1.0821, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 38.63, |
|
"grad_norm": 0.8464405536651611, |
|
"learning_rate": 0.00045481481481481485, |
|
"loss": 1.1023, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 38.67, |
|
"grad_norm": 0.8511638641357422, |
|
"learning_rate": 0.0004533333333333333, |
|
"loss": 1.0764, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 38.7, |
|
"grad_norm": 0.8756155371665955, |
|
"learning_rate": 0.00045185185185185183, |
|
"loss": 1.0985, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 38.74, |
|
"grad_norm": 0.9443466067314148, |
|
"learning_rate": 0.0004503703703703704, |
|
"loss": 1.1203, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 38.78, |
|
"grad_norm": 0.9254005551338196, |
|
"learning_rate": 0.0004488888888888889, |
|
"loss": 1.1035, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 38.81, |
|
"grad_norm": 0.8819862008094788, |
|
"learning_rate": 0.00044740740740740743, |
|
"loss": 1.1301, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 38.85, |
|
"grad_norm": 0.9181599617004395, |
|
"learning_rate": 0.00044592592592592595, |
|
"loss": 1.1231, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 38.89, |
|
"grad_norm": 0.8710459470748901, |
|
"learning_rate": 0.0004444444444444444, |
|
"loss": 1.1396, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 38.93, |
|
"grad_norm": 0.8439869284629822, |
|
"learning_rate": 0.000442962962962963, |
|
"loss": 1.1424, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 38.96, |
|
"grad_norm": 0.9218877553939819, |
|
"learning_rate": 0.0004414814814814815, |
|
"loss": 1.1299, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"grad_norm": 1.462300419807434, |
|
"learning_rate": 0.00044, |
|
"loss": 1.1464, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 39.04, |
|
"grad_norm": 0.8511271476745605, |
|
"learning_rate": 0.00043851851851851853, |
|
"loss": 0.9556, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 39.07, |
|
"grad_norm": 0.8737080097198486, |
|
"learning_rate": 0.00043703703703703705, |
|
"loss": 0.9775, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 39.11, |
|
"grad_norm": 0.8078649640083313, |
|
"learning_rate": 0.0004355555555555555, |
|
"loss": 0.9902, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 39.15, |
|
"grad_norm": 0.885040819644928, |
|
"learning_rate": 0.0004340740740740741, |
|
"loss": 0.9642, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 39.19, |
|
"grad_norm": 0.7933640480041504, |
|
"learning_rate": 0.0004325925925925926, |
|
"loss": 0.9942, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 39.22, |
|
"grad_norm": 0.8872569799423218, |
|
"learning_rate": 0.0004311111111111111, |
|
"loss": 0.9867, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 39.26, |
|
"grad_norm": 0.9073619842529297, |
|
"learning_rate": 0.00042962962962962963, |
|
"loss": 0.9865, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 39.3, |
|
"grad_norm": 0.8212401270866394, |
|
"learning_rate": 0.00042814814814814815, |
|
"loss": 1.0186, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 39.33, |
|
"grad_norm": 0.9199197292327881, |
|
"learning_rate": 0.0004266666666666667, |
|
"loss": 1.036, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 39.37, |
|
"grad_norm": 0.8489059805870056, |
|
"learning_rate": 0.0004251851851851852, |
|
"loss": 1.0283, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 39.41, |
|
"grad_norm": 0.8785027861595154, |
|
"learning_rate": 0.0004237037037037037, |
|
"loss": 1.0366, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 39.44, |
|
"grad_norm": 0.9491832256317139, |
|
"learning_rate": 0.0004222222222222222, |
|
"loss": 1.0272, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 39.48, |
|
"grad_norm": 0.8814444541931152, |
|
"learning_rate": 0.00042074074074074073, |
|
"loss": 1.0486, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 39.52, |
|
"grad_norm": 0.8941586017608643, |
|
"learning_rate": 0.0004192592592592593, |
|
"loss": 1.0587, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 39.56, |
|
"grad_norm": 0.8640462756156921, |
|
"learning_rate": 0.0004177777777777778, |
|
"loss": 1.0482, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 39.59, |
|
"grad_norm": 0.9424142241477966, |
|
"learning_rate": 0.0004162962962962963, |
|
"loss": 1.0624, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 39.63, |
|
"grad_norm": 0.883101224899292, |
|
"learning_rate": 0.0004148148148148148, |
|
"loss": 1.0434, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 39.67, |
|
"grad_norm": 0.9248467087745667, |
|
"learning_rate": 0.0004133333333333333, |
|
"loss": 1.0626, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 39.7, |
|
"grad_norm": 0.9620226621627808, |
|
"learning_rate": 0.0004118518518518519, |
|
"loss": 1.0794, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 39.74, |
|
"grad_norm": 0.8799591064453125, |
|
"learning_rate": 0.0004103703703703704, |
|
"loss": 1.072, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 39.78, |
|
"grad_norm": 0.9275612831115723, |
|
"learning_rate": 0.0004088888888888889, |
|
"loss": 1.0961, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 39.81, |
|
"grad_norm": 0.8838842511177063, |
|
"learning_rate": 0.0004074074074074074, |
|
"loss": 1.0729, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 39.85, |
|
"grad_norm": 0.8704506158828735, |
|
"learning_rate": 0.0004059259259259259, |
|
"loss": 1.0743, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 39.89, |
|
"grad_norm": 0.9030023217201233, |
|
"learning_rate": 0.00040444444444444447, |
|
"loss": 1.1022, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 39.93, |
|
"grad_norm": 0.9882118701934814, |
|
"learning_rate": 0.000402962962962963, |
|
"loss": 1.1086, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 39.96, |
|
"grad_norm": 0.848917543888092, |
|
"learning_rate": 0.0004014814814814815, |
|
"loss": 1.1086, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"grad_norm": 1.5683587789535522, |
|
"learning_rate": 0.0004, |
|
"loss": 1.1207, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 40.04, |
|
"grad_norm": 0.8366634845733643, |
|
"learning_rate": 0.0003985185185185185, |
|
"loss": 0.9414, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 40.07, |
|
"grad_norm": 0.876707911491394, |
|
"learning_rate": 0.00039703703703703705, |
|
"loss": 0.9212, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 40.11, |
|
"grad_norm": 0.7773799300193787, |
|
"learning_rate": 0.00039555555555555557, |
|
"loss": 0.9462, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 40.15, |
|
"grad_norm": 0.7996877431869507, |
|
"learning_rate": 0.0003940740740740741, |
|
"loss": 0.9493, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 40.19, |
|
"grad_norm": 0.8463727831840515, |
|
"learning_rate": 0.0003925925925925926, |
|
"loss": 0.9419, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 40.22, |
|
"grad_norm": 0.8150336742401123, |
|
"learning_rate": 0.0003911111111111111, |
|
"loss": 0.9619, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 40.26, |
|
"grad_norm": 0.8403350114822388, |
|
"learning_rate": 0.00038962962962962964, |
|
"loss": 0.968, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 40.3, |
|
"grad_norm": 0.8616166710853577, |
|
"learning_rate": 0.00038814814814814815, |
|
"loss": 0.9785, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 40.33, |
|
"grad_norm": 0.8425131440162659, |
|
"learning_rate": 0.00038666666666666667, |
|
"loss": 0.9913, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 40.37, |
|
"grad_norm": 0.8531786799430847, |
|
"learning_rate": 0.0003851851851851852, |
|
"loss": 0.9692, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 40.41, |
|
"grad_norm": 0.9102742671966553, |
|
"learning_rate": 0.0003837037037037037, |
|
"loss": 1.0184, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 40.44, |
|
"grad_norm": 0.8685367703437805, |
|
"learning_rate": 0.0003822222222222223, |
|
"loss": 1.0278, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 40.48, |
|
"grad_norm": 0.8668144941329956, |
|
"learning_rate": 0.0003807407407407408, |
|
"loss": 0.9819, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 40.52, |
|
"grad_norm": 0.9007130861282349, |
|
"learning_rate": 0.00037925925925925925, |
|
"loss": 0.9945, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 40.56, |
|
"grad_norm": 0.93947434425354, |
|
"learning_rate": 0.00037777777777777777, |
|
"loss": 1.0076, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 40.59, |
|
"grad_norm": 0.8986189961433411, |
|
"learning_rate": 0.0003762962962962963, |
|
"loss": 1.0123, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 40.63, |
|
"grad_norm": 0.8962591290473938, |
|
"learning_rate": 0.00037481481481481486, |
|
"loss": 1.0446, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 40.67, |
|
"grad_norm": 0.8676485419273376, |
|
"learning_rate": 0.0003733333333333334, |
|
"loss": 1.036, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 40.7, |
|
"grad_norm": 0.9445183873176575, |
|
"learning_rate": 0.0003718518518518519, |
|
"loss": 1.0697, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 40.74, |
|
"grad_norm": 0.9026569128036499, |
|
"learning_rate": 0.00037037037037037035, |
|
"loss": 1.0636, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 40.78, |
|
"grad_norm": 0.8249679803848267, |
|
"learning_rate": 0.00036888888888888887, |
|
"loss": 1.0346, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 40.81, |
|
"grad_norm": 0.8658695220947266, |
|
"learning_rate": 0.00036740740740740744, |
|
"loss": 1.0539, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 40.85, |
|
"grad_norm": 0.9501706957817078, |
|
"learning_rate": 0.00036592592592592596, |
|
"loss": 1.0831, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 40.89, |
|
"grad_norm": 0.9114291667938232, |
|
"learning_rate": 0.00036444444444444447, |
|
"loss": 1.0748, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 40.93, |
|
"grad_norm": 0.8624236583709717, |
|
"learning_rate": 0.000362962962962963, |
|
"loss": 1.072, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 40.96, |
|
"grad_norm": 0.8550862669944763, |
|
"learning_rate": 0.00036148148148148145, |
|
"loss": 1.1031, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 41.0, |
|
"grad_norm": 1.7575430870056152, |
|
"learning_rate": 0.00035999999999999997, |
|
"loss": 1.0836, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 41.04, |
|
"grad_norm": 0.8067315220832825, |
|
"learning_rate": 0.00035851851851851854, |
|
"loss": 0.8977, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 41.07, |
|
"grad_norm": 0.8133816123008728, |
|
"learning_rate": 0.00035703703703703706, |
|
"loss": 0.906, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 41.11, |
|
"grad_norm": 0.7989582419395447, |
|
"learning_rate": 0.00035555555555555557, |
|
"loss": 0.9014, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 41.15, |
|
"grad_norm": 0.7977972626686096, |
|
"learning_rate": 0.0003540740740740741, |
|
"loss": 0.9254, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 41.19, |
|
"grad_norm": 0.8233643770217896, |
|
"learning_rate": 0.00035259259259259255, |
|
"loss": 0.9437, |
|
"step": 11120 |
|
    },
    {
      "epoch": 41.22,
      "grad_norm": 0.8295071125030518,
      "learning_rate": 0.0003511111111111111,
      "loss": 0.9336,
      "step": 11130
    },
    {
      "epoch": 41.26,
      "grad_norm": 0.855749785900116,
      "learning_rate": 0.00034962962962962964,
      "loss": 0.9252,
      "step": 11140
    },
    {
      "epoch": 41.3,
      "grad_norm": 0.8856293559074402,
      "learning_rate": 0.00034814814814814816,
      "loss": 0.9455,
      "step": 11150
    },
    {
      "epoch": 41.33,
      "grad_norm": 0.7978120446205139,
      "learning_rate": 0.00034666666666666667,
      "loss": 0.9425,
      "step": 11160
    },
    {
      "epoch": 41.37,
      "grad_norm": 0.979863166809082,
      "learning_rate": 0.0003451851851851852,
      "loss": 0.9542,
      "step": 11170
    },
    {
      "epoch": 41.41,
      "grad_norm": 0.9663028717041016,
      "learning_rate": 0.0003437037037037037,
      "loss": 0.9512,
      "step": 11180
    },
    {
      "epoch": 41.44,
      "grad_norm": 0.8670268058776855,
      "learning_rate": 0.0003422222222222222,
      "loss": 0.9682,
      "step": 11190
    },
    {
      "epoch": 41.48,
      "grad_norm": 0.8599923849105835,
      "learning_rate": 0.00034074074074074074,
      "loss": 0.9807,
      "step": 11200
    },
    {
      "epoch": 41.52,
      "grad_norm": 0.8521960377693176,
      "learning_rate": 0.00033925925925925926,
      "loss": 1.0195,
      "step": 11210
    },
    {
      "epoch": 41.56,
      "grad_norm": 0.8760194778442383,
      "learning_rate": 0.00033777777777777777,
      "loss": 1.0155,
      "step": 11220
    },
    {
      "epoch": 41.59,
      "grad_norm": 0.9435073733329773,
      "learning_rate": 0.00033629629629629634,
      "loss": 1.0335,
      "step": 11230
    },
    {
      "epoch": 41.63,
      "grad_norm": 0.95516037940979,
      "learning_rate": 0.0003348148148148148,
      "loss": 1.0216,
      "step": 11240
    },
    {
      "epoch": 41.67,
      "grad_norm": 0.9270447492599487,
      "learning_rate": 0.0003333333333333333,
      "loss": 0.9949,
      "step": 11250
    },
    {
      "epoch": 41.7,
      "grad_norm": 0.9160760045051575,
      "learning_rate": 0.00033185185185185184,
      "loss": 1.0288,
      "step": 11260
    },
    {
      "epoch": 41.74,
      "grad_norm": 0.9127193093299866,
      "learning_rate": 0.00033037037037037036,
      "loss": 1.0357,
      "step": 11270
    },
    {
      "epoch": 41.78,
      "grad_norm": 0.993202269077301,
      "learning_rate": 0.0003288888888888889,
      "loss": 1.0154,
      "step": 11280
    },
    {
      "epoch": 41.81,
      "grad_norm": 0.9545233845710754,
      "learning_rate": 0.00032740740740740744,
      "loss": 1.0064,
      "step": 11290
    },
    {
      "epoch": 41.85,
      "grad_norm": 0.9582635760307312,
      "learning_rate": 0.00032592592592592596,
      "loss": 1.0345,
      "step": 11300
    },
    {
      "epoch": 41.89,
      "grad_norm": 0.8648403882980347,
      "learning_rate": 0.0003244444444444444,
      "loss": 0.9964,
      "step": 11310
    },
    {
      "epoch": 41.93,
      "grad_norm": 0.9680795669555664,
      "learning_rate": 0.00032296296296296294,
      "loss": 1.0256,
      "step": 11320
    },
    {
      "epoch": 41.96,
      "grad_norm": 0.9373118281364441,
      "learning_rate": 0.0003214814814814815,
      "loss": 1.0366,
      "step": 11330
    },
    {
      "epoch": 42.0,
      "grad_norm": 1.8185220956802368,
      "learning_rate": 0.00032,
      "loss": 1.036,
      "step": 11340
    },
    {
      "epoch": 42.04,
      "grad_norm": 0.8170128464698792,
      "learning_rate": 0.00031851851851851854,
      "loss": 0.8674,
      "step": 11350
    },
    {
      "epoch": 42.07,
      "grad_norm": 0.8094532489776611,
      "learning_rate": 0.00031703703703703706,
      "loss": 0.8846,
      "step": 11360
    },
    {
      "epoch": 42.11,
      "grad_norm": 0.796272873878479,
      "learning_rate": 0.0003155555555555555,
      "loss": 0.8928,
      "step": 11370
    },
    {
      "epoch": 42.15,
      "grad_norm": 0.8592316508293152,
      "learning_rate": 0.0003140740740740741,
      "loss": 0.8658,
      "step": 11380
    },
    {
      "epoch": 42.19,
      "grad_norm": 0.9022819399833679,
      "learning_rate": 0.0003125925925925926,
      "loss": 0.9059,
      "step": 11390
    },
    {
      "epoch": 42.22,
      "grad_norm": 0.8500738739967346,
      "learning_rate": 0.0003111111111111111,
      "loss": 0.8895,
      "step": 11400
    },
    {
      "epoch": 42.26,
      "grad_norm": 0.9119003415107727,
      "learning_rate": 0.00030962962962962964,
      "loss": 0.9162,
      "step": 11410
    },
    {
      "epoch": 42.3,
      "grad_norm": 0.8775156140327454,
      "learning_rate": 0.00030814814814814816,
      "loss": 0.9242,
      "step": 11420
    },
    {
      "epoch": 42.33,
      "grad_norm": 0.882416307926178,
      "learning_rate": 0.0003066666666666667,
      "loss": 0.9143,
      "step": 11430
    },
    {
      "epoch": 42.37,
      "grad_norm": 0.928913950920105,
      "learning_rate": 0.0003051851851851852,
      "loss": 0.9363,
      "step": 11440
    },
    {
      "epoch": 42.41,
      "grad_norm": 0.8908606171607971,
      "learning_rate": 0.0003037037037037037,
      "loss": 0.9727,
      "step": 11450
    },
    {
      "epoch": 42.44,
      "grad_norm": 0.8055885434150696,
      "learning_rate": 0.0003022222222222222,
      "loss": 0.9555,
      "step": 11460
    },
    {
      "epoch": 42.48,
      "grad_norm": 0.8678398728370667,
      "learning_rate": 0.00030074074074074074,
      "loss": 0.9621,
      "step": 11470
    },
    {
      "epoch": 42.52,
      "grad_norm": 0.9374486804008484,
      "learning_rate": 0.0002992592592592593,
      "loss": 0.9392,
      "step": 11480
    },
    {
      "epoch": 42.56,
      "grad_norm": 0.8560148477554321,
      "learning_rate": 0.0002977777777777778,
      "loss": 0.969,
      "step": 11490
    },
    {
      "epoch": 42.59,
      "grad_norm": 0.9379673004150391,
      "learning_rate": 0.0002962962962962963,
      "loss": 0.9561,
      "step": 11500
    },
    {
      "epoch": 42.63,
      "grad_norm": 0.8736953735351562,
      "learning_rate": 0.0002948148148148148,
      "loss": 0.9687,
      "step": 11510
    },
    {
      "epoch": 42.67,
      "grad_norm": 0.8709719777107239,
      "learning_rate": 0.0002933333333333333,
      "loss": 0.9725,
      "step": 11520
    },
    {
      "epoch": 42.7,
      "grad_norm": 0.9811522364616394,
      "learning_rate": 0.00029185185185185184,
      "loss": 0.9799,
      "step": 11530
    },
    {
      "epoch": 42.74,
      "grad_norm": 0.9396344423294067,
      "learning_rate": 0.0002903703703703704,
      "loss": 0.9782,
      "step": 11540
    },
    {
      "epoch": 42.78,
      "grad_norm": 0.8983749747276306,
      "learning_rate": 0.0002888888888888889,
      "loss": 1.0041,
      "step": 11550
    },
    {
      "epoch": 42.81,
      "grad_norm": 0.8971825242042542,
      "learning_rate": 0.0002874074074074074,
      "loss": 0.9945,
      "step": 11560
    },
    {
      "epoch": 42.85,
      "grad_norm": 0.9464390277862549,
      "learning_rate": 0.0002859259259259259,
      "loss": 0.9863,
      "step": 11570
    },
    {
      "epoch": 42.89,
      "grad_norm": 0.8920719027519226,
      "learning_rate": 0.0002844444444444444,
      "loss": 1.0018,
      "step": 11580
    },
    {
      "epoch": 42.93,
      "grad_norm": 0.9418626427650452,
      "learning_rate": 0.000282962962962963,
      "loss": 1.0104,
      "step": 11590
    },
    {
      "epoch": 42.96,
      "grad_norm": 0.9117964506149292,
      "learning_rate": 0.0002814814814814815,
      "loss": 1.011,
      "step": 11600
    },
    {
      "epoch": 43.0,
      "grad_norm": 1.7632156610488892,
      "learning_rate": 0.00028000000000000003,
      "loss": 0.9915,
      "step": 11610
    },
    {
      "epoch": 43.04,
      "grad_norm": 0.8767898082733154,
      "learning_rate": 0.0002785185185185185,
      "loss": 0.8568,
      "step": 11620
    },
    {
      "epoch": 43.07,
      "grad_norm": 0.7765992283821106,
      "learning_rate": 0.000277037037037037,
      "loss": 0.8576,
      "step": 11630
    },
    {
      "epoch": 43.11,
      "grad_norm": 0.8032299876213074,
      "learning_rate": 0.0002755555555555556,
      "loss": 0.8709,
      "step": 11640
    },
    {
      "epoch": 43.15,
      "grad_norm": 0.7957133054733276,
      "learning_rate": 0.0002740740740740741,
      "loss": 0.875,
      "step": 11650
    },
    {
      "epoch": 43.19,
      "grad_norm": 0.8329715728759766,
      "learning_rate": 0.0002725925925925926,
      "loss": 0.8807,
      "step": 11660
    },
    {
      "epoch": 43.22,
      "grad_norm": 0.9103427529335022,
      "learning_rate": 0.00027111111111111113,
      "loss": 0.8938,
      "step": 11670
    },
    {
      "epoch": 43.26,
      "grad_norm": 0.8969374299049377,
      "learning_rate": 0.0002696296296296296,
      "loss": 0.9047,
      "step": 11680
    },
    {
      "epoch": 43.3,
      "grad_norm": 0.8393443822860718,
      "learning_rate": 0.00026814814814814816,
      "loss": 0.8889,
      "step": 11690
    },
    {
      "epoch": 43.33,
      "grad_norm": 0.8780049681663513,
      "learning_rate": 0.0002666666666666667,
      "loss": 0.9081,
      "step": 11700
    },
    {
      "epoch": 43.37,
      "grad_norm": 0.838235080242157,
      "learning_rate": 0.0002651851851851852,
      "loss": 0.8852,
      "step": 11710
    },
    {
      "epoch": 43.41,
      "grad_norm": 0.9195196032524109,
      "learning_rate": 0.0002637037037037037,
      "loss": 0.9201,
      "step": 11720
    },
    {
      "epoch": 43.44,
      "grad_norm": 0.8491339683532715,
      "learning_rate": 0.00026222222222222223,
      "loss": 0.9016,
      "step": 11730
    },
    {
      "epoch": 43.48,
      "grad_norm": 0.861749529838562,
      "learning_rate": 0.00026074074074074075,
      "loss": 0.9327,
      "step": 11740
    },
    {
      "epoch": 43.52,
      "grad_norm": 0.8553155660629272,
      "learning_rate": 0.00025925925925925926,
      "loss": 0.9025,
      "step": 11750
    },
    {
      "epoch": 43.56,
      "grad_norm": 0.923421323299408,
      "learning_rate": 0.0002577777777777778,
      "loss": 0.9277,
      "step": 11760
    },
    {
      "epoch": 43.59,
      "grad_norm": 0.949780285358429,
      "learning_rate": 0.0002562962962962963,
      "loss": 0.9283,
      "step": 11770
    },
    {
      "epoch": 43.63,
      "grad_norm": 0.8996204733848572,
      "learning_rate": 0.0002548148148148148,
      "loss": 0.9325,
      "step": 11780
    },
    {
      "epoch": 43.67,
      "grad_norm": 0.8850076198577881,
      "learning_rate": 0.0002533333333333334,
      "loss": 0.9396,
      "step": 11790
    },
    {
      "epoch": 43.7,
      "grad_norm": 0.9062913656234741,
      "learning_rate": 0.00025185185185185185,
      "loss": 0.9394,
      "step": 11800
    },
    {
      "epoch": 43.74,
      "grad_norm": 0.8735949397087097,
      "learning_rate": 0.00025037037037037036,
      "loss": 0.9475,
      "step": 11810
    },
    {
      "epoch": 43.78,
      "grad_norm": 0.9367344379425049,
      "learning_rate": 0.0002488888888888889,
      "loss": 0.9474,
      "step": 11820
    },
    {
      "epoch": 43.81,
      "grad_norm": 0.9842424988746643,
      "learning_rate": 0.0002474074074074074,
      "loss": 0.9767,
      "step": 11830
    },
    {
      "epoch": 43.85,
      "grad_norm": 0.9207190275192261,
      "learning_rate": 0.00024592592592592597,
      "loss": 0.9553,
      "step": 11840
    },
    {
      "epoch": 43.89,
      "grad_norm": 0.8815023899078369,
      "learning_rate": 0.00024444444444444443,
      "loss": 0.947,
      "step": 11850
    },
    {
      "epoch": 43.93,
      "grad_norm": 0.9600652456283569,
      "learning_rate": 0.00024296296296296297,
      "loss": 0.9737,
      "step": 11860
    },
    {
      "epoch": 43.96,
      "grad_norm": 0.9066606760025024,
      "learning_rate": 0.0002414814814814815,
      "loss": 0.9744,
      "step": 11870
    },
    {
      "epoch": 44.0,
      "grad_norm": 1.5773016214370728,
      "learning_rate": 0.00024,
      "loss": 0.9578,
      "step": 11880
    },
    {
      "epoch": 44.04,
      "grad_norm": 0.8049682378768921,
      "learning_rate": 0.00023851851851851852,
      "loss": 0.8292,
      "step": 11890
    },
    {
      "epoch": 44.07,
      "grad_norm": 0.7937451601028442,
      "learning_rate": 0.00023703703703703704,
      "loss": 0.8328,
      "step": 11900
    },
    {
      "epoch": 44.11,
      "grad_norm": 0.8170547485351562,
      "learning_rate": 0.00023555555555555556,
      "loss": 0.8325,
      "step": 11910
    },
    {
      "epoch": 44.15,
      "grad_norm": 0.8086414337158203,
      "learning_rate": 0.00023407407407407407,
      "loss": 0.8293,
      "step": 11920
    },
    {
      "epoch": 44.19,
      "grad_norm": 0.8746134042739868,
      "learning_rate": 0.00023259259259259262,
      "loss": 0.8795,
      "step": 11930
    },
    {
      "epoch": 44.22,
      "grad_norm": 0.8427841663360596,
      "learning_rate": 0.0002311111111111111,
      "loss": 0.8783,
      "step": 11940
    },
    {
      "epoch": 44.26,
      "grad_norm": 0.8464419841766357,
      "learning_rate": 0.00022962962962962962,
      "loss": 0.8574,
      "step": 11950
    },
    {
      "epoch": 44.3,
      "grad_norm": 0.947233259677887,
      "learning_rate": 0.00022814814814814817,
      "loss": 0.8892,
      "step": 11960
    },
    {
      "epoch": 44.33,
      "grad_norm": 0.850940465927124,
      "learning_rate": 0.00022666666666666666,
      "loss": 0.8745,
      "step": 11970
    },
    {
      "epoch": 44.37,
      "grad_norm": 0.8650044202804565,
      "learning_rate": 0.0002251851851851852,
      "loss": 0.8772,
      "step": 11980
    },
    {
      "epoch": 44.41,
      "grad_norm": 0.8571513891220093,
      "learning_rate": 0.00022370370370370372,
      "loss": 0.8875,
      "step": 11990
    },
    {
      "epoch": 44.44,
      "grad_norm": 0.8866828680038452,
      "learning_rate": 0.0002222222222222222,
      "loss": 0.8747,
      "step": 12000
    },
    {
      "epoch": 44.48,
      "grad_norm": 0.9303273558616638,
      "learning_rate": 0.00022074074074074075,
      "loss": 0.9052,
      "step": 12010
    },
    {
      "epoch": 44.52,
      "grad_norm": 0.8586504459381104,
      "learning_rate": 0.00021925925925925927,
      "loss": 0.8762,
      "step": 12020
    },
    {
      "epoch": 44.56,
      "grad_norm": 0.8415549397468567,
      "learning_rate": 0.00021777777777777776,
      "loss": 0.8934,
      "step": 12030
    },
    {
      "epoch": 44.59,
      "grad_norm": 0.894075334072113,
      "learning_rate": 0.0002162962962962963,
      "loss": 0.885,
      "step": 12040
    },
    {
      "epoch": 44.63,
      "grad_norm": 0.9093772768974304,
      "learning_rate": 0.00021481481481481482,
      "loss": 0.9059,
      "step": 12050
    },
    {
      "epoch": 44.67,
      "grad_norm": 1.0175001621246338,
      "learning_rate": 0.00021333333333333336,
      "loss": 0.9157,
      "step": 12060
    },
    {
      "epoch": 44.7,
      "grad_norm": 0.9057775735855103,
      "learning_rate": 0.00021185185185185185,
      "loss": 0.9139,
      "step": 12070
    },
    {
      "epoch": 44.74,
      "grad_norm": 0.8696966767311096,
      "learning_rate": 0.00021037037037037037,
      "loss": 0.9275,
      "step": 12080
    },
    {
      "epoch": 44.78,
      "grad_norm": 1.0306414365768433,
      "learning_rate": 0.0002088888888888889,
      "loss": 0.9056,
      "step": 12090
    },
    {
      "epoch": 44.81,
      "grad_norm": 0.8880541920661926,
      "learning_rate": 0.0002074074074074074,
      "loss": 0.9099,
      "step": 12100
    },
    {
      "epoch": 44.85,
      "grad_norm": 0.9045748710632324,
      "learning_rate": 0.00020592592592592594,
      "loss": 0.9254,
      "step": 12110
    },
    {
      "epoch": 44.89,
      "grad_norm": 0.9737614989280701,
      "learning_rate": 0.00020444444444444446,
      "loss": 0.9166,
      "step": 12120
    },
    {
      "epoch": 44.93,
      "grad_norm": 0.838835597038269,
      "learning_rate": 0.00020296296296296295,
      "loss": 0.9455,
      "step": 12130
    },
    {
      "epoch": 44.96,
      "grad_norm": 0.911868691444397,
      "learning_rate": 0.0002014814814814815,
      "loss": 0.9586,
      "step": 12140
    },
    {
      "epoch": 45.0,
      "grad_norm": 1.629447340965271,
      "learning_rate": 0.0002,
      "loss": 0.9213,
      "step": 12150
    },
    {
      "epoch": 45.04,
      "grad_norm": 0.8545764684677124,
      "learning_rate": 0.00019851851851851853,
      "loss": 0.8333,
      "step": 12160
    },
    {
      "epoch": 45.07,
      "grad_norm": 0.8149832487106323,
      "learning_rate": 0.00019703703703703704,
      "loss": 0.7863,
      "step": 12170
    },
    {
      "epoch": 45.11,
      "grad_norm": 0.8192715644836426,
      "learning_rate": 0.00019555555555555556,
      "loss": 0.8435,
      "step": 12180
    },
    {
      "epoch": 45.15,
      "grad_norm": 0.8543215394020081,
      "learning_rate": 0.00019407407407407408,
      "loss": 0.8246,
      "step": 12190
    },
    {
      "epoch": 45.19,
      "grad_norm": 0.8113214373588562,
      "learning_rate": 0.0001925925925925926,
      "loss": 0.8264,
      "step": 12200
    },
    {
      "epoch": 45.22,
      "grad_norm": 0.8288257718086243,
      "learning_rate": 0.00019111111111111114,
      "loss": 0.8308,
      "step": 12210
    },
    {
      "epoch": 45.26,
      "grad_norm": 0.8256189227104187,
      "learning_rate": 0.00018962962962962963,
      "loss": 0.8362,
      "step": 12220
    },
    {
      "epoch": 45.3,
      "grad_norm": 0.8684147596359253,
      "learning_rate": 0.00018814814814814814,
      "loss": 0.8363,
      "step": 12230
    },
    {
      "epoch": 45.33,
      "grad_norm": 0.850592315196991,
      "learning_rate": 0.0001866666666666667,
      "loss": 0.853,
      "step": 12240
    },
    {
      "epoch": 45.37,
      "grad_norm": 0.894404411315918,
      "learning_rate": 0.00018518518518518518,
      "loss": 0.8639,
      "step": 12250
    },
    {
      "epoch": 45.41,
      "grad_norm": 0.8467885255813599,
      "learning_rate": 0.00018370370370370372,
      "loss": 0.8461,
      "step": 12260
    },
    {
      "epoch": 45.44,
      "grad_norm": 0.9240211844444275,
      "learning_rate": 0.00018222222222222224,
      "loss": 0.8751,
      "step": 12270
    },
    {
      "epoch": 45.48,
      "grad_norm": 0.8148699998855591,
      "learning_rate": 0.00018074074074074073,
      "loss": 0.8744,
      "step": 12280
    },
    {
      "epoch": 45.52,
      "grad_norm": 0.8215749859809875,
      "learning_rate": 0.00017925925925925927,
      "loss": 0.8629,
      "step": 12290
    },
    {
      "epoch": 45.56,
      "grad_norm": 0.9265359044075012,
      "learning_rate": 0.00017777777777777779,
      "loss": 0.882,
      "step": 12300
    },
    {
      "epoch": 45.59,
      "grad_norm": 0.8920613527297974,
      "learning_rate": 0.00017629629629629628,
      "loss": 0.8522,
      "step": 12310
    },
    {
      "epoch": 45.63,
      "grad_norm": 0.964079737663269,
      "learning_rate": 0.00017481481481481482,
      "loss": 0.873,
      "step": 12320
    },
    {
      "epoch": 45.67,
      "grad_norm": 0.8842025399208069,
      "learning_rate": 0.00017333333333333334,
      "loss": 0.8642,
      "step": 12330
    },
    {
      "epoch": 45.7,
      "grad_norm": 0.9127376079559326,
      "learning_rate": 0.00017185185185185185,
      "loss": 0.8805,
      "step": 12340
    },
    {
      "epoch": 45.74,
      "grad_norm": 0.8502327799797058,
      "learning_rate": 0.00017037037037037037,
      "loss": 0.8818,
      "step": 12350
    },
    {
      "epoch": 45.78,
      "grad_norm": 0.9649406671524048,
      "learning_rate": 0.00016888888888888889,
      "loss": 0.8934,
      "step": 12360
    },
    {
      "epoch": 45.81,
      "grad_norm": 0.9439674615859985,
      "learning_rate": 0.0001674074074074074,
      "loss": 0.9173,
      "step": 12370
    },
    {
      "epoch": 45.85,
      "grad_norm": 0.8551307916641235,
      "learning_rate": 0.00016592592592592592,
      "loss": 0.8811,
      "step": 12380
    },
    {
      "epoch": 45.89,
      "grad_norm": 0.9205163717269897,
      "learning_rate": 0.00016444444444444446,
      "loss": 0.8846,
      "step": 12390
    },
    {
      "epoch": 45.93,
      "grad_norm": 0.90606290102005,
      "learning_rate": 0.00016296296296296298,
      "loss": 0.8941,
      "step": 12400
    },
    {
      "epoch": 45.96,
      "grad_norm": 0.9254661798477173,
      "learning_rate": 0.00016148148148148147,
      "loss": 0.901,
      "step": 12410
    },
    {
      "epoch": 46.0,
      "grad_norm": 1.6181832551956177,
      "learning_rate": 0.00016,
      "loss": 0.8908,
      "step": 12420
    },
    {
      "epoch": 46.04,
      "grad_norm": 0.8116936683654785,
      "learning_rate": 0.00015851851851851853,
      "loss": 0.8005,
      "step": 12430
    },
    {
      "epoch": 46.07,
      "grad_norm": 0.749098002910614,
      "learning_rate": 0.00015703703703703705,
      "loss": 0.7914,
      "step": 12440
    },
    {
      "epoch": 46.11,
      "grad_norm": 0.7693229913711548,
      "learning_rate": 0.00015555555555555556,
      "loss": 0.8053,
      "step": 12450
    },
    {
      "epoch": 46.15,
      "grad_norm": 0.7828912138938904,
      "learning_rate": 0.00015407407407407408,
      "loss": 0.7946,
      "step": 12460
    },
    {
      "epoch": 46.19,
      "grad_norm": 0.8083990216255188,
      "learning_rate": 0.0001525925925925926,
      "loss": 0.817,
      "step": 12470
    },
    {
      "epoch": 46.22,
      "grad_norm": 0.827688455581665,
      "learning_rate": 0.0001511111111111111,
      "loss": 0.7919,
      "step": 12480
    },
    {
      "epoch": 46.26,
      "grad_norm": 0.8141525983810425,
      "learning_rate": 0.00014962962962962966,
      "loss": 0.8346,
      "step": 12490
    },
    {
      "epoch": 46.3,
      "grad_norm": 0.823664128780365,
      "learning_rate": 0.00014814814814814815,
      "loss": 0.84,
      "step": 12500
    },
    {
      "epoch": 46.33,
      "grad_norm": 0.8547099232673645,
      "learning_rate": 0.00014666666666666666,
      "loss": 0.803,
      "step": 12510
    },
    {
      "epoch": 46.37,
      "grad_norm": 0.8899357914924622,
      "learning_rate": 0.0001451851851851852,
      "loss": 0.8316,
      "step": 12520
    },
    {
      "epoch": 46.41,
      "grad_norm": 0.8477994203567505,
      "learning_rate": 0.0001437037037037037,
      "loss": 0.8129,
      "step": 12530
    },
    {
      "epoch": 46.44,
      "grad_norm": 0.7684743404388428,
      "learning_rate": 0.0001422222222222222,
      "loss": 0.8405,
      "step": 12540
    },
    {
      "epoch": 46.48,
      "grad_norm": 0.816189706325531,
      "learning_rate": 0.00014074074074074076,
      "loss": 0.8365,
      "step": 12550
    },
    {
      "epoch": 46.52,
      "grad_norm": 0.844874382019043,
      "learning_rate": 0.00013925925925925925,
      "loss": 0.8149,
      "step": 12560
    },
    {
      "epoch": 46.56,
      "grad_norm": 0.8706566095352173,
      "learning_rate": 0.0001377777777777778,
      "loss": 0.8676,
      "step": 12570
    },
    {
      "epoch": 46.59,
      "grad_norm": 0.8781683444976807,
      "learning_rate": 0.0001362962962962963,
      "loss": 0.8667,
      "step": 12580
    },
    {
      "epoch": 46.63,
      "grad_norm": 0.8802633881568909,
      "learning_rate": 0.0001348148148148148,
      "loss": 0.8314,
      "step": 12590
    },
    {
      "epoch": 46.67,
      "grad_norm": 0.8872252106666565,
      "learning_rate": 0.00013333333333333334,
      "loss": 0.858,
      "step": 12600
    },
    {
      "epoch": 46.7,
      "grad_norm": 0.8980612754821777,
      "learning_rate": 0.00013185185185185186,
      "loss": 0.8538,
      "step": 12610
    },
    {
      "epoch": 46.74,
      "grad_norm": 0.8398934006690979,
      "learning_rate": 0.00013037037037037037,
      "loss": 0.8457,
      "step": 12620
    },
    {
      "epoch": 46.78,
      "grad_norm": 0.8653325438499451,
      "learning_rate": 0.0001288888888888889,
      "loss": 0.8621,
      "step": 12630
    },
    {
      "epoch": 46.81,
      "grad_norm": 0.9367448687553406,
      "learning_rate": 0.0001274074074074074,
      "loss": 0.8437,
      "step": 12640
    },
    {
      "epoch": 46.85,
      "grad_norm": 0.9195182919502258,
      "learning_rate": 0.00012592592592592592,
      "loss": 0.8675,
      "step": 12650
    },
    {
      "epoch": 46.89,
      "grad_norm": 0.8260027170181274,
      "learning_rate": 0.00012444444444444444,
      "loss": 0.8702,
      "step": 12660
    },
    {
      "epoch": 46.93,
      "grad_norm": 0.8661918044090271,
      "learning_rate": 0.00012296296296296298,
      "loss": 0.8738,
      "step": 12670
    },
    {
      "epoch": 46.96,
      "grad_norm": 0.9230578541755676,
      "learning_rate": 0.00012148148148148149,
      "loss": 0.8542,
      "step": 12680
    },
    {
      "epoch": 47.0,
      "grad_norm": 1.741862177848816,
      "learning_rate": 0.00012,
      "loss": 0.8547,
      "step": 12690
    },
    {
      "epoch": 47.04,
      "grad_norm": 0.864289402961731,
      "learning_rate": 0.00011851851851851852,
      "loss": 0.7575,
      "step": 12700
    },
    {
      "epoch": 47.07,
      "grad_norm": 0.7751877307891846,
      "learning_rate": 0.00011703703703703704,
      "loss": 0.7825,
      "step": 12710
    },
    {
      "epoch": 47.11,
      "grad_norm": 0.8020772933959961,
      "learning_rate": 0.00011555555555555555,
      "loss": 0.787,
      "step": 12720
    },
    {
      "epoch": 47.15,
      "grad_norm": 0.8673202395439148,
      "learning_rate": 0.00011407407407407408,
      "loss": 0.7693,
      "step": 12730
    },
    {
      "epoch": 47.19,
      "grad_norm": 0.7707006335258484,
      "learning_rate": 0.0001125925925925926,
      "loss": 0.779,
      "step": 12740
    },
    {
      "epoch": 47.22,
      "grad_norm": 0.7779736518859863,
      "learning_rate": 0.0001111111111111111,
      "loss": 0.7879,
      "step": 12750
    },
    {
      "epoch": 47.26,
      "grad_norm": 0.8151842951774597,
      "learning_rate": 0.00010962962962962963,
      "loss": 0.806,
      "step": 12760
    },
    {
      "epoch": 47.3,
      "grad_norm": 0.7836940884590149,
      "learning_rate": 0.00010814814814814815,
      "loss": 0.7884,
      "step": 12770
    },
    {
      "epoch": 47.33,
      "grad_norm": 0.826682448387146,
      "learning_rate": 0.00010666666666666668,
      "loss": 0.8061,
      "step": 12780
    },
    {
      "epoch": 47.37,
      "grad_norm": 0.8258213996887207,
      "learning_rate": 0.00010518518518518518,
      "loss": 0.7968,
      "step": 12790
    },
    {
      "epoch": 47.41,
      "grad_norm": 0.8098385334014893,
      "learning_rate": 0.0001037037037037037,
      "loss": 0.8034,
      "step": 12800
    },
    {
      "epoch": 47.44,
      "grad_norm": 0.8096972107887268,
      "learning_rate": 0.00010222222222222223,
      "loss": 0.813,
      "step": 12810
    },
    {
      "epoch": 47.48,
      "grad_norm": 0.8447617292404175,
      "learning_rate": 0.00010074074074074075,
      "loss": 0.8093,
      "step": 12820
    },
    {
      "epoch": 47.52,
      "grad_norm": 0.81069016456604,
      "learning_rate": 9.925925925925926e-05,
      "loss": 0.8279,
      "step": 12830
    },
    {
      "epoch": 47.56,
      "grad_norm": 0.8630595207214355,
      "learning_rate": 9.777777777777778e-05,
      "loss": 0.8246,
      "step": 12840
    },
    {
      "epoch": 47.59,
      "grad_norm": 0.839095950126648,
      "learning_rate": 9.62962962962963e-05,
      "loss": 0.8453,
      "step": 12850
    },
    {
      "epoch": 47.63,
      "grad_norm": 0.8493757843971252,
      "learning_rate": 9.481481481481481e-05,
      "loss": 0.8245,
      "step": 12860
    },
    {
      "epoch": 47.67,
      "grad_norm": 0.8431633114814758,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.8121,
      "step": 12870
    },
    {
      "epoch": 47.7,
      "grad_norm": 0.8720538020133972,
      "learning_rate": 9.185185185185186e-05,
      "loss": 0.8136,
      "step": 12880
    },
    {
      "epoch": 47.74,
      "grad_norm": 0.8341265320777893,
      "learning_rate": 9.037037037037036e-05,
      "loss": 0.8345,
      "step": 12890
    },
    {
      "epoch": 47.78,
      "grad_norm": 0.853097677230835,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.8462,
      "step": 12900
    },
    {
      "epoch": 47.81,
      "grad_norm": 0.8465114831924438,
      "learning_rate": 8.740740740740741e-05,
      "loss": 0.8283,
      "step": 12910
    },
    {
      "epoch": 47.85,
      "grad_norm": 0.8404960632324219,
      "learning_rate": 8.592592592592593e-05,
      "loss": 0.8428,
      "step": 12920
    },
    {
      "epoch": 47.89,
      "grad_norm": 0.9160316586494446,
      "learning_rate": 8.444444444444444e-05,
      "loss": 0.8383,
      "step": 12930
    },
    {
      "epoch": 47.93,
      "grad_norm": 0.8309608101844788,
      "learning_rate": 8.296296296296296e-05,
      "loss": 0.8282,
      "step": 12940
    },
    {
      "epoch": 47.96,
      "grad_norm": 0.9367932677268982,
      "learning_rate": 8.148148148148149e-05,
      "loss": 0.8221,
      "step": 12950
    },
    {
      "epoch": 48.0,
      "grad_norm": 1.5219368934631348,
      "learning_rate": 8e-05,
      "loss": 0.8323,
      "step": 12960
    },
    {
      "epoch": 48.04,
      "grad_norm": 0.7839010953903198,
      "learning_rate": 7.851851851851852e-05,
      "loss": 0.7676,
      "step": 12970
    },
    {
      "epoch": 48.07,
      "grad_norm": 0.7914835214614868,
      "learning_rate": 7.703703703703704e-05,
      "loss": 0.7613,
      "step": 12980
    },
    {
      "epoch": 48.11,
      "grad_norm": 0.7833623886108398,
      "learning_rate": 7.555555555555556e-05,
      "loss": 0.7553,
      "step": 12990
    },
    {
      "epoch": 48.15,
      "grad_norm": 0.7814247608184814,
      "learning_rate": 7.407407407407407e-05,
      "loss": 0.7786,
      "step": 13000
    },
    {
      "epoch": 48.19,
      "grad_norm": 0.7823523283004761,
      "learning_rate": 7.25925925925926e-05,
      "loss": 0.7827,
      "step": 13010
    },
    {
      "epoch": 48.22,
      "grad_norm": 0.792614758014679,
      "learning_rate": 7.11111111111111e-05,
      "loss": 0.7865,
      "step": 13020
    },
    {
      "epoch": 48.26,
      "grad_norm": 0.793111264705658,
      "learning_rate": 6.962962962962962e-05,
      "loss": 0.7847,
      "step": 13030
    },
    {
      "epoch": 48.3,
      "grad_norm": 0.8105340600013733,
      "learning_rate": 6.814814814814815e-05,
      "loss": 0.7835,
      "step": 13040
    },
    {
      "epoch": 48.33,
      "grad_norm": 0.8111973404884338,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.7689,
      "step": 13050
    },
    {
      "epoch": 48.37,
      "grad_norm": 0.8513317108154297,
      "learning_rate": 6.518518518518519e-05,
      "loss": 0.7717,
      "step": 13060
    },
    {
      "epoch": 48.41,
      "grad_norm": 0.8038240075111389,
      "learning_rate": 6.37037037037037e-05,
      "loss": 0.795,
      "step": 13070
    },
    {
      "epoch": 48.44,
      "grad_norm": 0.7720206379890442,
      "learning_rate": 6.222222222222222e-05,
      "loss": 0.7906,
      "step": 13080
    },
    {
      "epoch": 48.48,
      "grad_norm": 0.8711926937103271,
      "learning_rate": 6.074074074074074e-05,
      "loss": 0.7849,
      "step": 13090
    },
    {
      "epoch": 48.52,
      "grad_norm": 0.8024256229400635,
      "learning_rate": 5.925925925925926e-05,
      "loss": 0.7792,
      "step": 13100
    },
    {
      "epoch": 48.56,
      "grad_norm": 0.8680411577224731,
      "learning_rate": 5.7777777777777776e-05,
      "loss": 0.7717,
      "step": 13110
    },
    {
      "epoch": 48.59,
      "grad_norm": 0.8359400033950806,
      "learning_rate": 5.62962962962963e-05,
      "loss": 0.7976,
      "step": 13120
    },
    {
      "epoch": 48.63,
      "grad_norm": 0.8003830313682556,
      "learning_rate": 5.4814814814814817e-05,
      "loss": 0.7872,
      "step": 13130
    },
    {
      "epoch": 48.67,
      "grad_norm": 0.799286961555481,
      "learning_rate": 5.333333333333334e-05,
      "loss": 0.7912,
      "step": 13140
    },
    {
      "epoch": 48.7,
      "grad_norm": 0.8358856439590454,
      "learning_rate": 5.185185185185185e-05,
      "loss": 0.8027,
      "step": 13150
    },
    {
      "epoch": 48.74,
      "grad_norm": 0.8304854035377502,
      "learning_rate": 5.037037037037037e-05,
      "loss": 0.7769,
      "step": 13160
    },
    {
      "epoch": 48.78,
      "grad_norm": 0.8182996511459351,
      "learning_rate": 4.888888888888889e-05,
      "loss": 0.7943,
      "step": 13170
    },
    {
      "epoch": 48.81,
      "grad_norm": 0.8343393802642822,
      "learning_rate": 4.7407407407407407e-05,
      "loss": 0.798,
      "step": 13180
    },
    {
      "epoch": 48.85,
      "grad_norm": 0.8523905277252197,
      "learning_rate": 4.592592592592593e-05,
      "loss": 0.8004,
      "step": 13190
    },
    {
      "epoch": 48.89,
      "grad_norm": 0.8108332753181458,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.7972,
      "step": 13200
    },
    {
      "epoch": 48.93,
      "grad_norm": 0.8315889239311218,
      "learning_rate": 4.296296296296296e-05,
      "loss": 0.8369,
      "step": 13210
    },
    {
      "epoch": 48.96,
      "grad_norm": 0.8833268880844116,
      "learning_rate": 4.148148148148148e-05,
      "loss": 0.8205,
      "step": 13220
    },
    {
      "epoch": 49.0,
      "grad_norm": 1.7572418451309204,
      "learning_rate": 4e-05,
      "loss": 0.7834,
      "step": 13230
    },
    {
      "epoch": 49.04,
      "grad_norm": 0.7579549551010132,
      "learning_rate": 3.851851851851852e-05,
      "loss": 0.7437,
      "step": 13240
    },
    {
      "epoch": 49.07,
      "grad_norm": 0.8062260150909424,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.7523,
      "step": 13250
    },
    {
      "epoch": 49.11,
      "grad_norm": 0.758929967880249,
      "learning_rate": 3.555555555555555e-05,
      "loss": 0.7441,
      "step": 13260
    },
    {
      "epoch": 49.15,
      "grad_norm": 0.8011849522590637,
      "learning_rate": 3.4074074074074077e-05,
      "loss": 0.7644,
      "step": 13270
    },
    {
      "epoch": 49.19,
      "grad_norm": 0.7755334377288818,
      "learning_rate": 3.259259259259259e-05,
      "loss": 0.7656,
      "step": 13280
    },
    {
      "epoch": 49.22,
      "grad_norm": 0.7567688226699829,
      "learning_rate": 3.111111111111111e-05,
      "loss": 0.7662,
      "step": 13290
    },
    {
      "epoch": 49.26,
      "grad_norm": 0.7689881920814514,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.7661,
      "step": 13300
    },
    {
      "epoch": 49.3,
      "grad_norm": 0.803693950176239,
      "learning_rate": 2.814814814814815e-05,
      "loss": 0.7798,
      "step": 13310
    },
    {
      "epoch": 49.33,
      "grad_norm": 0.7691094875335693,
      "learning_rate": 2.666666666666667e-05,
      "loss": 0.7601,
      "step": 13320
    },
    {
      "epoch": 49.37,
      "grad_norm": 0.7872125506401062,
      "learning_rate": 2.5185185185185187e-05,
      "loss": 0.7695,
      "step": 13330
    },
    {
      "epoch": 49.41,
      "grad_norm": 0.7724753618240356,
      "learning_rate": 2.3703703703703703e-05,
      "loss": 0.7635,
      "step": 13340
    },
    {
      "epoch": 49.44,
      "grad_norm": 0.7950810194015503,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.7501,
      "step": 13350
    },
    {
      "epoch": 49.48,
      "grad_norm": 0.8038039803504944,
      "learning_rate": 2.074074074074074e-05,
      "loss": 0.7426,
      "step": 13360
    },
    {
      "epoch": 49.52,
      "grad_norm": 0.7851365804672241,
      "learning_rate": 1.925925925925926e-05,
      "loss": 0.7925,
      "step": 13370
    },
    {
      "epoch": 49.56,
      "grad_norm": 0.797153115272522,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 0.7748,
      "step": 13380
    },
    {
      "epoch": 49.59,
      "grad_norm": 0.7819933891296387,
      "learning_rate": 1.6296296296296297e-05,
      "loss": 0.7626,
      "step": 13390
    },
    {
      "epoch": 49.63,
      "grad_norm": 0.7707337737083435,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.7607,
      "step": 13400
    },
    {
      "epoch": 49.67,
      "grad_norm": 0.7730938792228699,
      "learning_rate": 1.3333333333333335e-05,
      "loss": 0.7886,
      "step": 13410
    },
    {
      "epoch": 49.7,
      "grad_norm": 0.8007907867431641,
      "learning_rate": 1.1851851851851852e-05,
      "loss": 0.7844,
      "step": 13420
    },
    {
      "epoch": 49.74,
      "grad_norm": 0.7911803126335144,
      "learning_rate": 1.037037037037037e-05,
      "loss": 0.8026,
      "step": 13430
    },
    {
      "epoch": 49.78,
      "grad_norm": 0.8312400579452515,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.7486,
      "step": 13440
    },
    {
      "epoch": 49.81,
      "grad_norm": 0.7924412488937378,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.7762,
      "step": 13450
    },
    {
      "epoch": 49.85,
      "grad_norm": 0.8182461261749268,
      "learning_rate": 5.925925925925926e-06,
      "loss": 0.7547,
      "step": 13460
    },
    {
      "epoch": 49.89,
      "grad_norm": 0.843571126461029,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.7633,
      "step": 13470
    },
    {
      "epoch": 49.93,
      "grad_norm": 0.7726001739501953,
      "learning_rate": 2.962962962962963e-06,
      "loss": 0.7705,
      "step": 13480
    },
    {
      "epoch": 49.96,
      "grad_norm": 0.8144612312316895,
      "learning_rate": 1.4814814814814815e-06,
      "loss": 0.777,
      "step": 13490
    },
    {
      "epoch": 50.0,
      "grad_norm": 1.684682846069336,
      "learning_rate": 0.0,
      "loss": 0.7724,
      "step": 13500
    }
  ],
  "logging_steps": 10,
  "max_steps": 13500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 500,
  "total_flos": 1.6910948327464694e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}