{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.991111111111111,
  "eval_steps": 500,
  "global_step": 112,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.017777777777777778,
      "grad_norm": 6.02266263961792,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.9993,
      "step": 1
    },
    {
      "epoch": 0.035555555555555556,
      "grad_norm": 6.143394947052002,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0111,
      "step": 2
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 5.803601264953613,
      "learning_rate": 5e-06,
      "loss": 0.9858,
      "step": 3
    },
    {
      "epoch": 0.07111111111111111,
      "grad_norm": 4.266067028045654,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.9484,
      "step": 4
    },
    {
      "epoch": 0.08888888888888889,
      "grad_norm": 2.9427103996276855,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.9376,
      "step": 5
    },
    {
      "epoch": 0.10666666666666667,
      "grad_norm": 4.489638805389404,
      "learning_rate": 1e-05,
      "loss": 0.9214,
      "step": 6
    },
    {
      "epoch": 0.12444444444444444,
      "grad_norm": 4.621401786804199,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.9122,
      "step": 7
    },
    {
      "epoch": 0.14222222222222222,
      "grad_norm": 3.9608113765716553,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.8665,
      "step": 8
    },
    {
      "epoch": 0.16,
      "grad_norm": 3.7279064655303955,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.8395,
      "step": 9
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 2.407290458679199,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.8198,
      "step": 10
    },
    {
      "epoch": 0.19555555555555557,
      "grad_norm": 2.280531644821167,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.7984,
      "step": 11
    },
    {
      "epoch": 0.21333333333333335,
      "grad_norm": 2.6176528930664062,
      "learning_rate": 2e-05,
      "loss": 0.794,
      "step": 12
    },
    {
      "epoch": 0.2311111111111111,
      "grad_norm": 1.707384467124939,
      "learning_rate": 1.9995065603657317e-05,
      "loss": 0.783,
      "step": 13
    },
    {
      "epoch": 0.24888888888888888,
      "grad_norm": 1.9082920551300049,
      "learning_rate": 1.9980267284282718e-05,
      "loss": 0.7379,
      "step": 14
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 1.7811676263809204,
      "learning_rate": 1.99556196460308e-05,
      "loss": 0.7524,
      "step": 15
    },
    {
      "epoch": 0.28444444444444444,
      "grad_norm": 1.3555017709732056,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 0.754,
      "step": 16
    },
    {
      "epoch": 0.3022222222222222,
      "grad_norm": 1.3673418760299683,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.7073,
      "step": 17
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.4924910068511963,
      "learning_rate": 1.982287250728689e-05,
      "loss": 0.7159,
      "step": 18
    },
    {
      "epoch": 0.3377777777777778,
      "grad_norm": 1.6850374937057495,
      "learning_rate": 1.9759167619387474e-05,
      "loss": 0.7114,
      "step": 19
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 1.511631727218628,
      "learning_rate": 1.9685831611286312e-05,
      "loss": 0.7012,
      "step": 20
    },
    {
      "epoch": 0.37333333333333335,
      "grad_norm": 1.2830287218093872,
      "learning_rate": 1.9602936856769432e-05,
      "loss": 0.6948,
      "step": 21
    },
    {
      "epoch": 0.39111111111111113,
      "grad_norm": 1.2557350397109985,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.7046,
      "step": 22
    },
    {
      "epoch": 0.4088888888888889,
      "grad_norm": 0.9395996928215027,
      "learning_rate": 1.9408807689542257e-05,
      "loss": 0.7076,
      "step": 23
    },
    {
      "epoch": 0.4266666666666667,
      "grad_norm": 1.0723974704742432,
      "learning_rate": 1.9297764858882516e-05,
      "loss": 0.6983,
      "step": 24
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.0539971590042114,
      "learning_rate": 1.9177546256839814e-05,
      "loss": 0.6839,
      "step": 25
    },
    {
      "epoch": 0.4622222222222222,
      "grad_norm": 0.9047765135765076,
      "learning_rate": 1.9048270524660197e-05,
      "loss": 0.6768,
      "step": 26
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.9144095182418823,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.6773,
      "step": 27
    },
    {
      "epoch": 0.49777777777777776,
      "grad_norm": 0.8761118054389954,
      "learning_rate": 1.8763066800438638e-05,
      "loss": 0.6852,
      "step": 28
    },
    {
      "epoch": 0.5155555555555555,
      "grad_norm": 0.9473320841789246,
      "learning_rate": 1.860742027003944e-05,
      "loss": 0.6882,
      "step": 29
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 1.0702017545700073,
      "learning_rate": 1.8443279255020153e-05,
      "loss": 0.6677,
      "step": 30
    },
    {
      "epoch": 0.5511111111111111,
      "grad_norm": 0.7134117484092712,
      "learning_rate": 1.827080574274562e-05,
      "loss": 0.6696,
      "step": 31
    },
    {
      "epoch": 0.5688888888888889,
      "grad_norm": 0.8997704386711121,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.6484,
      "step": 32
    },
    {
      "epoch": 0.5866666666666667,
      "grad_norm": 0.7318180203437805,
      "learning_rate": 1.7901550123756906e-05,
      "loss": 0.6859,
      "step": 33
    },
    {
      "epoch": 0.6044444444444445,
      "grad_norm": 0.8574209213256836,
      "learning_rate": 1.7705132427757895e-05,
      "loss": 0.6557,
      "step": 34
    },
    {
      "epoch": 0.6222222222222222,
      "grad_norm": 0.7167562246322632,
      "learning_rate": 1.7501110696304598e-05,
      "loss": 0.6511,
      "step": 35
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.701689600944519,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 0.6603,
      "step": 36
    },
    {
      "epoch": 0.6577777777777778,
      "grad_norm": 0.6708585023880005,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.6674,
      "step": 37
    },
    {
      "epoch": 0.6755555555555556,
      "grad_norm": 0.7055069208145142,
      "learning_rate": 1.684547105928689e-05,
      "loss": 0.6624,
      "step": 38
    },
    {
      "epoch": 0.6933333333333334,
      "grad_norm": 0.6366652250289917,
      "learning_rate": 1.661311865323652e-05,
      "loss": 0.659,
      "step": 39
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.7192656397819519,
      "learning_rate": 1.63742398974869e-05,
      "loss": 0.6403,
      "step": 40
    },
    {
      "epoch": 0.7288888888888889,
      "grad_norm": 0.7419708967208862,
      "learning_rate": 1.6129070536529767e-05,
      "loss": 0.6649,
      "step": 41
    },
    {
      "epoch": 0.7466666666666667,
      "grad_norm": 0.6875132918357849,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.6636,
      "step": 42
    },
    {
      "epoch": 0.7644444444444445,
      "grad_norm": 0.7200429439544678,
      "learning_rate": 1.5620833778521306e-05,
      "loss": 0.6464,
      "step": 43
    },
    {
      "epoch": 0.7822222222222223,
      "grad_norm": 0.6726692318916321,
      "learning_rate": 1.5358267949789968e-05,
      "loss": 0.6513,
      "step": 44
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.6832252740859985,
      "learning_rate": 1.5090414157503715e-05,
      "loss": 0.6625,
      "step": 45
    },
    {
      "epoch": 0.8177777777777778,
      "grad_norm": 0.7112930417060852,
      "learning_rate": 1.4817536741017153e-05,
      "loss": 0.6471,
      "step": 46
    },
    {
      "epoch": 0.8355555555555556,
      "grad_norm": 0.671824038028717,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.6625,
      "step": 47
    },
    {
      "epoch": 0.8533333333333334,
      "grad_norm": 0.6902845501899719,
      "learning_rate": 1.4257792915650728e-05,
      "loss": 0.6374,
      "step": 48
    },
    {
      "epoch": 0.8711111111111111,
      "grad_norm": 0.5699530243873596,
      "learning_rate": 1.3971478906347806e-05,
      "loss": 0.6431,
      "step": 49
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.6002618670463562,
      "learning_rate": 1.3681245526846782e-05,
      "loss": 0.6381,
      "step": 50
    },
    {
      "epoch": 0.9066666666666666,
      "grad_norm": 0.576767086982727,
      "learning_rate": 1.3387379202452917e-05,
      "loss": 0.6338,
      "step": 51
    },
    {
      "epoch": 0.9244444444444444,
      "grad_norm": 0.6442639231681824,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.6502,
      "step": 52
    },
    {
      "epoch": 0.9422222222222222,
      "grad_norm": 0.651739776134491,
      "learning_rate": 1.2789911060392295e-05,
      "loss": 0.6449,
      "step": 53
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.8692764043807983,
      "learning_rate": 1.2486898871648552e-05,
      "loss": 0.6511,
      "step": 54
    },
    {
      "epoch": 0.9777777777777777,
      "grad_norm": 0.6121811270713806,
      "learning_rate": 1.2181432413965428e-05,
      "loss": 0.6367,
      "step": 55
    },
    {
      "epoch": 0.9955555555555555,
      "grad_norm": 0.8803330659866333,
      "learning_rate": 1.187381314585725e-05,
      "loss": 0.6532,
      "step": 56
    },
    {
      "epoch": 1.0133333333333334,
      "grad_norm": 1.236048698425293,
      "learning_rate": 1.156434465040231e-05,
      "loss": 1.0796,
      "step": 57
    },
    {
      "epoch": 1.031111111111111,
      "grad_norm": 0.899872362613678,
      "learning_rate": 1.1253332335643043e-05,
      "loss": 0.634,
      "step": 58
    },
    {
      "epoch": 1.048888888888889,
      "grad_norm": 0.5719662308692932,
      "learning_rate": 1.0941083133185146e-05,
      "loss": 0.6002,
      "step": 59
    },
    {
      "epoch": 1.0666666666666667,
      "grad_norm": 0.788408100605011,
      "learning_rate": 1.0627905195293135e-05,
      "loss": 0.5628,
      "step": 60
    },
    {
      "epoch": 1.0844444444444445,
      "grad_norm": 0.7448046207427979,
      "learning_rate": 1.0314107590781284e-05,
      "loss": 0.6532,
      "step": 61
    },
    {
      "epoch": 1.1022222222222222,
      "grad_norm": 0.8060784935951233,
      "learning_rate": 1e-05,
      "loss": 0.5744,
      "step": 62
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.7961754202842712,
      "learning_rate": 9.685892409218718e-06,
      "loss": 0.5798,
      "step": 63
    },
    {
      "epoch": 1.1377777777777778,
      "grad_norm": 0.6215574145317078,
      "learning_rate": 9.372094804706867e-06,
      "loss": 0.567,
      "step": 64
    },
    {
      "epoch": 1.1555555555555554,
      "grad_norm": 0.7157091498374939,
      "learning_rate": 9.058916866814857e-06,
      "loss": 0.6152,
      "step": 65
    },
    {
      "epoch": 1.1733333333333333,
      "grad_norm": 0.6254159212112427,
      "learning_rate": 8.746667664356957e-06,
      "loss": 0.6099,
      "step": 66
    },
    {
      "epoch": 1.1911111111111112,
      "grad_norm": 0.5155752897262573,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.5508,
      "step": 67
    },
    {
      "epoch": 1.208888888888889,
      "grad_norm": 0.5491471886634827,
      "learning_rate": 8.126186854142752e-06,
      "loss": 0.625,
      "step": 68
    },
    {
      "epoch": 1.2266666666666666,
      "grad_norm": 0.5200133919715881,
      "learning_rate": 7.818567586034578e-06,
      "loss": 0.5666,
      "step": 69
    },
    {
      "epoch": 1.2444444444444445,
      "grad_norm": 0.5425897240638733,
      "learning_rate": 7.513101128351454e-06,
      "loss": 0.6,
      "step": 70
    },
    {
      "epoch": 1.2622222222222224,
      "grad_norm": 0.420964777469635,
      "learning_rate": 7.210088939607709e-06,
      "loss": 0.5466,
      "step": 71
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.5204867124557495,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.6297,
      "step": 72
    },
    {
      "epoch": 1.2977777777777777,
      "grad_norm": 0.491071879863739,
      "learning_rate": 6.612620797547087e-06,
      "loss": 0.5333,
      "step": 73
    },
    {
      "epoch": 1.3155555555555556,
      "grad_norm": 0.4720323979854584,
      "learning_rate": 6.318754473153221e-06,
      "loss": 0.6132,
      "step": 74
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.430493026971817,
      "learning_rate": 6.028521093652195e-06,
      "loss": 0.5587,
      "step": 75
    },
    {
      "epoch": 1.3511111111111112,
      "grad_norm": 0.583470344543457,
      "learning_rate": 5.742207084349274e-06,
      "loss": 0.6111,
      "step": 76
    },
    {
      "epoch": 1.3688888888888888,
      "grad_norm": 0.5109282732009888,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.5848,
      "step": 77
    },
    {
      "epoch": 1.3866666666666667,
      "grad_norm": 0.5880340933799744,
      "learning_rate": 5.1824632589828465e-06,
      "loss": 0.6158,
      "step": 78
    },
    {
      "epoch": 1.4044444444444444,
      "grad_norm": 0.5229690670967102,
      "learning_rate": 4.909585842496287e-06,
      "loss": 0.5424,
      "step": 79
    },
    {
      "epoch": 1.4222222222222223,
      "grad_norm": 0.5499680638313293,
      "learning_rate": 4.641732050210032e-06,
      "loss": 0.5793,
      "step": 80
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.42541825771331787,
      "learning_rate": 4.379166221478697e-06,
      "loss": 0.5984,
      "step": 81
    },
    {
      "epoch": 1.4577777777777778,
      "grad_norm": 0.46670961380004883,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.621,
      "step": 82
    },
    {
      "epoch": 1.4755555555555555,
      "grad_norm": 0.4210701286792755,
      "learning_rate": 3.8709294634702374e-06,
      "loss": 0.5242,
      "step": 83
    },
    {
      "epoch": 1.4933333333333334,
      "grad_norm": 0.4062272310256958,
      "learning_rate": 3.625760102513103e-06,
      "loss": 0.563,
      "step": 84
    },
    {
      "epoch": 1.511111111111111,
      "grad_norm": 0.42036882042884827,
      "learning_rate": 3.3868813467634833e-06,
      "loss": 0.6099,
      "step": 85
    },
    {
      "epoch": 1.528888888888889,
      "grad_norm": 0.413107693195343,
      "learning_rate": 3.1545289407131128e-06,
      "loss": 0.5765,
      "step": 86
    },
    {
      "epoch": 1.5466666666666666,
      "grad_norm": 0.4331943392753601,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.5988,
      "step": 87
    },
    {
      "epoch": 1.5644444444444443,
      "grad_norm": 0.4215940535068512,
      "learning_rate": 2.7103137257858867e-06,
      "loss": 0.5901,
      "step": 88
    },
    {
      "epoch": 1.5822222222222222,
      "grad_norm": 0.3558850586414337,
      "learning_rate": 2.4988893036954045e-06,
      "loss": 0.5578,
      "step": 89
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.35593435168266296,
      "learning_rate": 2.2948675722421086e-06,
      "loss": 0.5952,
      "step": 90
    },
    {
      "epoch": 1.6177777777777778,
      "grad_norm": 0.35841694474220276,
      "learning_rate": 2.098449876243096e-06,
      "loss": 0.5729,
      "step": 91
    },
    {
      "epoch": 1.6355555555555554,
      "grad_norm": 0.36976689100265503,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.6167,
      "step": 92
    },
    {
      "epoch": 1.6533333333333333,
      "grad_norm": 0.34893473982810974,
      "learning_rate": 1.7291942572543806e-06,
      "loss": 0.558,
      "step": 93
    },
    {
      "epoch": 1.6711111111111112,
      "grad_norm": 0.3704639673233032,
      "learning_rate": 1.5567207449798517e-06,
      "loss": 0.5948,
      "step": 94
    },
    {
      "epoch": 1.6888888888888889,
      "grad_norm": 0.3578838109970093,
      "learning_rate": 1.3925797299605649e-06,
      "loss": 0.5715,
      "step": 95
    },
    {
      "epoch": 1.7066666666666666,
      "grad_norm": 0.35762616991996765,
      "learning_rate": 1.2369331995613664e-06,
      "loss": 0.5936,
      "step": 96
    },
    {
      "epoch": 1.7244444444444444,
      "grad_norm": 0.3288617730140686,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.5924,
      "step": 97
    },
    {
      "epoch": 1.7422222222222223,
      "grad_norm": 0.35121095180511475,
      "learning_rate": 9.517294753398066e-07,
      "loss": 0.575,
      "step": 98
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.32291051745414734,
      "learning_rate": 8.224537431601886e-07,
      "loss": 0.5367,
      "step": 99
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.37526610493659973,
      "learning_rate": 7.022351411174866e-07,
      "loss": 0.5957,
      "step": 100
    },
    {
      "epoch": 1.7955555555555556,
      "grad_norm": 0.34747347235679626,
      "learning_rate": 5.911923104577455e-07,
      "loss": 0.5824,
      "step": 101
    },
    {
      "epoch": 1.8133333333333335,
      "grad_norm": 0.3201601803302765,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.561,
      "step": 102
    },
    {
      "epoch": 1.8311111111111111,
      "grad_norm": 0.36945855617523193,
      "learning_rate": 3.9706314323056936e-07,
      "loss": 0.5761,
      "step": 103
    },
    {
      "epoch": 1.8488888888888888,
      "grad_norm": 0.3337363600730896,
      "learning_rate": 3.1416838871368925e-07,
      "loss": 0.5536,
      "step": 104
    },
    {
      "epoch": 1.8666666666666667,
      "grad_norm": 0.35673728585243225,
      "learning_rate": 2.4083238061252565e-07,
      "loss": 0.6234,
      "step": 105
    },
    {
      "epoch": 1.8844444444444446,
      "grad_norm": 0.3251909613609314,
      "learning_rate": 1.7712749271311392e-07,
      "loss": 0.5415,
      "step": 106
    },
    {
      "epoch": 1.9022222222222223,
      "grad_norm": 0.35213449597358704,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.5611,
      "step": 107
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.34044721722602844,
      "learning_rate": 7.885298685522235e-08,
      "loss": 0.5628,
      "step": 108
    },
    {
      "epoch": 1.9377777777777778,
      "grad_norm": 0.34606897830963135,
      "learning_rate": 4.438035396920004e-08,
      "loss": 0.5907,
      "step": 109
    },
    {
      "epoch": 1.9555555555555557,
      "grad_norm": 0.3360365629196167,
      "learning_rate": 1.973271571728441e-08,
      "loss": 0.6009,
      "step": 110
    },
    {
      "epoch": 1.9733333333333334,
      "grad_norm": 0.3323586583137512,
      "learning_rate": 4.9343963426840006e-09,
      "loss": 0.5773,
      "step": 111
    },
    {
      "epoch": 1.991111111111111,
      "grad_norm": 0.3478974997997284,
      "learning_rate": 0.0,
      "loss": 0.5963,
      "step": 112
    },
    {
      "epoch": 1.991111111111111,
      "step": 112,
      "total_flos": 185195202609152.0,
      "train_loss": 0.65867241738098,
      "train_runtime": 12992.6194,
      "train_samples_per_second": 0.83,
      "train_steps_per_second": 0.009
    }
  ],
  "logging_steps": 1,
  "max_steps": 112,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 185195202609152.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}