{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.021462105969148,
  "eval_steps": 47,
  "global_step": 376,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0053655264922870555, "grad_norm": 1.2779298491774165, "learning_rate": 1.8518518518518519e-06, "loss": 2.0226, "step": 1 },
    { "epoch": 0.0053655264922870555, "eval_loss": 3.5763909816741943, "eval_runtime": 403.4915, "eval_samples_per_second": 6.317, "eval_steps_per_second": 0.791, "step": 1 },
    { "epoch": 0.010731052984574111, "grad_norm": 0.9868395151451838, "learning_rate": 3.7037037037037037e-06, "loss": 2.0251, "step": 2 },
    { "epoch": 0.01609657947686117, "grad_norm": 1.124719320101948, "learning_rate": 5.555555555555556e-06, "loss": 2.0676, "step": 3 },
    { "epoch": 0.021462105969148222, "grad_norm": 0.8967993867216901, "learning_rate": 7.4074074074074075e-06, "loss": 2.0685, "step": 4 },
    { "epoch": 0.02682763246143528, "grad_norm": 0.8530329942799596, "learning_rate": 9.259259259259259e-06, "loss": 1.975, "step": 5 },
    { "epoch": 0.03219315895372234, "grad_norm": 17.572932401470002, "learning_rate": 1.1111111111111112e-05, "loss": 2.0319, "step": 6 },
    { "epoch": 0.03755868544600939, "grad_norm": 1.5955614660997672, "learning_rate": 1.2962962962962962e-05, "loss": 1.9776, "step": 7 },
    { "epoch": 0.042924211938296444, "grad_norm": 1.0305843114650708, "learning_rate": 1.4814814814814815e-05, "loss": 1.9384, "step": 8 },
    { "epoch": 0.0482897384305835, "grad_norm": 0.7931768664816836, "learning_rate": 1.6666666666666667e-05, "loss": 2.0357, "step": 9 },
    { "epoch": 0.05365526492287056, "grad_norm": 0.7458181490503686, "learning_rate": 1.8518518518518518e-05, "loss": 1.9343, "step": 10 },
    { "epoch": 0.05902079141515761, "grad_norm": 1.0379874858551188, "learning_rate": 2.037037037037037e-05, "loss": 1.9069, "step": 11 },
    { "epoch": 0.06438631790744467, "grad_norm": 0.9634098196921086, "learning_rate": 2.2222222222222223e-05, "loss": 1.8831, "step": 12 },
    { "epoch": 0.06975184439973173, "grad_norm": 0.8492217551097285, "learning_rate": 2.4074074074074074e-05, "loss": 1.9665, "step": 13 },
    { "epoch": 0.07511737089201878, "grad_norm": 0.8866414055564286, "learning_rate": 2.5925925925925925e-05, "loss": 1.9859, "step": 14 },
    { "epoch": 0.08048289738430583, "grad_norm": 0.8975616587977772, "learning_rate": 2.777777777777778e-05, "loss": 1.9621, "step": 15 },
    { "epoch": 0.08584842387659289, "grad_norm": 0.8856060845121474, "learning_rate": 2.962962962962963e-05, "loss": 2.0504, "step": 16 },
    { "epoch": 0.09121395036887994, "grad_norm": 0.7934430921825818, "learning_rate": 3.148148148148148e-05, "loss": 1.8965, "step": 17 },
    { "epoch": 0.096579476861167, "grad_norm": 0.8411958882279533, "learning_rate": 3.3333333333333335e-05, "loss": 1.834, "step": 18 },
    { "epoch": 0.10194500335345406, "grad_norm": 0.9605732267993595, "learning_rate": 3.518518518518519e-05, "loss": 2.0321, "step": 19 },
    { "epoch": 0.10731052984574112, "grad_norm": 0.8964432934098547, "learning_rate": 3.7037037037037037e-05, "loss": 1.9165, "step": 20 },
    { "epoch": 0.11267605633802817, "grad_norm": 0.8325547065304583, "learning_rate": 3.888888888888889e-05, "loss": 2.0088, "step": 21 },
    { "epoch": 0.11804158283031523, "grad_norm": 0.7337982135739225, "learning_rate": 4.074074074074074e-05, "loss": 1.8652, "step": 22 },
    { "epoch": 0.12340710932260228, "grad_norm": 0.8703749413838572, "learning_rate": 4.259259259259259e-05, "loss": 1.9384, "step": 23 },
    { "epoch": 0.12877263581488935, "grad_norm": 0.752681510079145, "learning_rate": 4.4444444444444447e-05, "loss": 1.84, "step": 24 },
    { "epoch": 0.1341381623071764, "grad_norm": 0.7664305287185245, "learning_rate": 4.62962962962963e-05, "loss": 1.917, "step": 25 },
    { "epoch": 0.13950368879946345, "grad_norm": 0.7376417365374465, "learning_rate": 4.814814814814815e-05, "loss": 1.8995, "step": 26 },
    { "epoch": 0.1448692152917505, "grad_norm": 0.6723119101020634, "learning_rate": 5e-05, "loss": 1.8572, "step": 27 },
    { "epoch": 0.15023474178403756, "grad_norm": 0.6636335261172684, "learning_rate": 4.999956245830044e-05, "loss": 1.8311, "step": 28 },
    { "epoch": 0.15560026827632462, "grad_norm": 0.7118137563548644, "learning_rate": 4.9998249848517185e-05, "loss": 1.9461, "step": 29 },
    { "epoch": 0.16096579476861167, "grad_norm": 0.5571924001587539, "learning_rate": 4.999606221659595e-05, "loss": 1.9125, "step": 30 },
    { "epoch": 0.16633132126089872, "grad_norm": 0.5895748551871794, "learning_rate": 4.999299963911115e-05, "loss": 1.9295, "step": 31 },
    { "epoch": 0.17169684775318578, "grad_norm": 0.5114717351226575, "learning_rate": 4.9989062223263216e-05, "loss": 1.8642, "step": 32 },
    { "epoch": 0.17706237424547283, "grad_norm": 0.5658751591114588, "learning_rate": 4.998425010687484e-05, "loss": 1.9465, "step": 33 },
    { "epoch": 0.18242790073775988, "grad_norm": 0.4198888613535172, "learning_rate": 4.997856345838615e-05, "loss": 1.7824, "step": 34 },
    { "epoch": 0.18779342723004694, "grad_norm": 0.558439585084249, "learning_rate": 4.99720024768488e-05, "loss": 1.9157, "step": 35 },
    { "epoch": 0.193158953722334, "grad_norm": 0.4888963940737741, "learning_rate": 4.996456739191905e-05, "loss": 1.9136, "step": 36 },
    { "epoch": 0.19852448021462105, "grad_norm": 0.5106753170932593, "learning_rate": 4.995625846384966e-05, "loss": 1.8623, "step": 37 },
    { "epoch": 0.20389000670690813, "grad_norm": 0.5942581620888907, "learning_rate": 4.994707598348085e-05, "loss": 1.8741, "step": 38 },
    { "epoch": 0.20925553319919518, "grad_norm": 0.5106367767120957, "learning_rate": 4.993702027223004e-05, "loss": 1.9413, "step": 39 },
    { "epoch": 0.21462105969148224, "grad_norm": 0.5113103948113709, "learning_rate": 4.992609168208069e-05, "loss": 1.9966, "step": 40 },
    { "epoch": 0.2199865861837693, "grad_norm": 0.5212461839914251, "learning_rate": 4.9914290595569895e-05, "loss": 1.8474, "step": 41 },
    { "epoch": 0.22535211267605634, "grad_norm": 0.5161668483087154, "learning_rate": 4.9901617425775067e-05, "loss": 1.8557, "step": 42 },
    { "epoch": 0.2307176391683434, "grad_norm": 0.5484719777790631, "learning_rate": 4.988807261629942e-05, "loss": 1.8769, "step": 43 },
    { "epoch": 0.23608316566063045, "grad_norm": 0.4937411819105057, "learning_rate": 4.987365664125647e-05, "loss": 1.8923, "step": 44 },
    { "epoch": 0.2414486921529175, "grad_norm": 0.5804971756745295, "learning_rate": 4.985837000525343e-05, "loss": 1.954, "step": 45 },
    { "epoch": 0.24681421864520456, "grad_norm": 0.531027559174979, "learning_rate": 4.984221324337356e-05, "loss": 1.9758, "step": 46 },
    { "epoch": 0.25217974513749164, "grad_norm": 0.5132447531432143, "learning_rate": 4.982518692115744e-05, "loss": 1.8588, "step": 47 },
    { "epoch": 0.25217974513749164, "eval_loss": 2.2616188526153564, "eval_runtime": 403.9965, "eval_samples_per_second": 6.309, "eval_steps_per_second": 0.79, "step": 47 },
    { "epoch": 0.2575452716297787, "grad_norm": 0.4799971353542736, "learning_rate": 4.980729163458312e-05, "loss": 1.8558, "step": 48 },
    { "epoch": 0.26291079812206575, "grad_norm": 0.4318157242619782, "learning_rate": 4.978852801004534e-05, "loss": 1.9574, "step": 49 },
    { "epoch": 0.2682763246143528, "grad_norm": 0.44416051699732234, "learning_rate": 4.976889670433355e-05, "loss": 1.7988, "step": 50 },
    { "epoch": 0.27364185110663986, "grad_norm": 0.42380891285410505, "learning_rate": 4.974839840460895e-05, "loss": 1.8296, "step": 51 },
    { "epoch": 0.2790073775989269, "grad_norm": 0.4989220387491634, "learning_rate": 4.97270338283804e-05, "loss": 1.8996, "step": 52 },
    { "epoch": 0.28437290409121396, "grad_norm": 0.44554710351703636, "learning_rate": 4.970480372347934e-05, "loss": 1.9553, "step": 53 },
    { "epoch": 0.289738430583501, "grad_norm": 0.5597504355268026, "learning_rate": 4.9681708868033616e-05, "loss": 1.9979, "step": 54 },
    { "epoch": 0.29510395707578807, "grad_norm": 0.4758356280331154, "learning_rate": 4.9657750070440196e-05, "loss": 1.9244, "step": 55 },
    { "epoch": 0.3004694835680751, "grad_norm": 0.4337532785073617, "learning_rate": 4.963292816933692e-05, "loss": 1.9099, "step": 56 },
    { "epoch": 0.3058350100603622, "grad_norm": 0.48193864337289033, "learning_rate": 4.9607244033573156e-05, "loss": 1.8076, "step": 57 },
    { "epoch": 0.31120053655264923, "grad_norm": 0.5180685237569209, "learning_rate": 4.9580698562179297e-05, "loss": 1.9031, "step": 58 },
    { "epoch": 0.3165660630449363, "grad_norm": 0.4227856708321325, "learning_rate": 4.955329268433543e-05, "loss": 1.8111, "step": 59 },
    { "epoch": 0.32193158953722334, "grad_norm": 0.4457771321490526, "learning_rate": 4.9525027359338696e-05, "loss": 1.8771, "step": 60 },
    { "epoch": 0.3272971160295104, "grad_norm": 0.383809821251567, "learning_rate": 4.949590357656975e-05, "loss": 1.8384, "step": 61 },
    { "epoch": 0.33266264252179745, "grad_norm": 0.5245332667935808, "learning_rate": 4.946592235545815e-05, "loss": 1.948, "step": 62 },
    { "epoch": 0.3380281690140845, "grad_norm": 0.48131587989680386, "learning_rate": 4.9435084745446666e-05, "loss": 1.9546, "step": 63 },
    { "epoch": 0.34339369550637155, "grad_norm": 0.47579745445020977, "learning_rate": 4.940339182595451e-05, "loss": 1.9085, "step": 64 },
    { "epoch": 0.3487592219986586, "grad_norm": 0.5162389548444078, "learning_rate": 4.9370844706339594e-05, "loss": 1.9263, "step": 65 },
    { "epoch": 0.35412474849094566, "grad_norm": 0.4430593262438425, "learning_rate": 4.933744452585966e-05, "loss": 1.8264, "step": 66 },
    { "epoch": 0.3594902749832327, "grad_norm": 0.4564426740705147, "learning_rate": 4.930319245363248e-05, "loss": 1.7974, "step": 67 },
    { "epoch": 0.36485580147551977, "grad_norm": 0.4896650401826788, "learning_rate": 4.926808968859483e-05, "loss": 1.8567, "step": 68 },
    { "epoch": 0.3702213279678068, "grad_norm": 0.4051071793789495, "learning_rate": 4.923213745946059e-05, "loss": 1.8201, "step": 69 },
    { "epoch": 0.3755868544600939, "grad_norm": 0.48978661474326063, "learning_rate": 4.919533702467771e-05, "loss": 1.8006, "step": 70 },
    { "epoch": 0.38095238095238093, "grad_norm": 0.5175966382748892, "learning_rate": 4.9157689672384174e-05, "loss": 1.7904, "step": 71 },
    { "epoch": 0.386317907444668, "grad_norm": 0.49081107125596507, "learning_rate": 4.91191967203629e-05, "loss": 1.9606, "step": 72 },
    { "epoch": 0.39168343393695504, "grad_norm": 0.5920859101699153, "learning_rate": 4.907985951599563e-05, "loss": 1.8124, "step": 73 },
    { "epoch": 0.3970489604292421, "grad_norm": 0.4106853545996278, "learning_rate": 4.9039679436215734e-05, "loss": 1.9402, "step": 74 },
    { "epoch": 0.4024144869215292, "grad_norm": 0.6429573337250454, "learning_rate": 4.899865788746005e-05, "loss": 1.8233, "step": 75 },
    { "epoch": 0.40778001341381626, "grad_norm": 0.5770635817407002, "learning_rate": 4.895679630561963e-05, "loss": 1.8909, "step": 76 },
    { "epoch": 0.4131455399061033, "grad_norm": 0.8202937145800111, "learning_rate": 4.891409615598949e-05, "loss": 1.8635, "step": 77 },
    { "epoch": 0.41851106639839036, "grad_norm": 0.6871160286583309, "learning_rate": 4.88705589332173e-05, "loss": 1.9497, "step": 78 },
    { "epoch": 0.4238765928906774, "grad_norm": 0.4438407032445468, "learning_rate": 4.882618616125111e-05, "loss": 1.9336, "step": 79 },
    { "epoch": 0.42924211938296447, "grad_norm": 0.545225076259868, "learning_rate": 4.878097939328596e-05, "loss": 1.843, "step": 80 },
    { "epoch": 0.4346076458752515, "grad_norm": 0.5120969176703908, "learning_rate": 4.873494021170953e-05, "loss": 1.9019, "step": 81 },
    { "epoch": 0.4399731723675386, "grad_norm": 0.41579446347477983, "learning_rate": 4.868807022804678e-05, "loss": 1.8617, "step": 82 },
    { "epoch": 0.44533869885982563, "grad_norm": 0.5510693357302907, "learning_rate": 4.864037108290347e-05, "loss": 1.9433, "step": 83 },
    { "epoch": 0.4507042253521127, "grad_norm": 0.40913367019675556, "learning_rate": 4.859184444590882e-05, "loss": 1.8353, "step": 84 },
    { "epoch": 0.45606975184439974, "grad_norm": 0.3763529830498608, "learning_rate": 4.854249201565701e-05, "loss": 1.7299, "step": 85 },
    { "epoch": 0.4614352783366868, "grad_norm": 0.47448012995411376, "learning_rate": 4.849231551964771e-05, "loss": 1.8547, "step": 86 },
    { "epoch": 0.46680080482897385, "grad_norm": 0.4181797287844973, "learning_rate": 4.84413167142257e-05, "loss": 1.8186, "step": 87 },
    { "epoch": 0.4721663313212609, "grad_norm": 0.4781785535505977, "learning_rate": 4.838949738451929e-05, "loss": 1.8965, "step": 88 },
    { "epoch": 0.47753185781354796, "grad_norm": 0.4351347675471463, "learning_rate": 4.833685934437787e-05, "loss": 1.863, "step": 89 },
    { "epoch": 0.482897384305835, "grad_norm": 0.3874884362959953, "learning_rate": 4.8283404436308464e-05, "loss": 1.7396, "step": 90 },
    { "epoch": 0.48826291079812206, "grad_norm": 0.49444219921157534, "learning_rate": 4.8229134531411166e-05, "loss": 1.8444, "step": 91 },
    { "epoch": 0.4936284372904091, "grad_norm": 0.39922493357750344, "learning_rate": 4.8174051529313704e-05, "loss": 1.8247, "step": 92 },
    { "epoch": 0.49899396378269617, "grad_norm": 0.5511215543224721, "learning_rate": 4.81181573581049e-05, "loss": 1.9533, "step": 93 },
    { "epoch": 0.5043594902749833, "grad_norm": 0.41108793496635143, "learning_rate": 4.8061453974267195e-05, "loss": 1.7952, "step": 94 },
    { "epoch": 0.5043594902749833, "eval_loss": 2.198935031890869, "eval_runtime": 403.21, "eval_samples_per_second": 6.322, "eval_steps_per_second": 0.791, "step": 94 },
    { "epoch": 0.5097250167672703, "grad_norm": 0.43234228757508586, "learning_rate": 4.80039433626082e-05, "loss": 1.8667, "step": 95 },
    { "epoch": 0.5150905432595574, "grad_norm": 0.40707489762011484, "learning_rate": 4.7945627536191166e-05, "loss": 1.9081, "step": 96 },
    { "epoch": 0.5204560697518444, "grad_norm": 0.41662703960667113, "learning_rate": 4.788650853626456e-05, "loss": 1.7536, "step": 97 },
    { "epoch": 0.5258215962441315, "grad_norm": 0.39754454922458654, "learning_rate": 4.7826588432190614e-05, "loss": 1.8851, "step": 98 },
    { "epoch": 0.5311871227364185, "grad_norm": 0.41218018944985396, "learning_rate": 4.7765869321372836e-05, "loss": 1.8062, "step": 99 },
    { "epoch": 0.5365526492287056, "grad_norm": 0.41546782428712065, "learning_rate": 4.7704353329182673e-05, "loss": 1.8491, "step": 100 },
    { "epoch": 0.5419181757209927, "grad_norm": 0.47175537335479406, "learning_rate": 4.7642042608885064e-05, "loss": 1.7917, "step": 101 },
    { "epoch": 0.5472837022132797, "grad_norm": 0.4373071645022652, "learning_rate": 4.7578939341563095e-05, "loss": 1.8632, "step": 102 },
    { "epoch": 0.5526492287055668, "grad_norm": 0.4369286574512984, "learning_rate": 4.751504573604162e-05, "loss": 1.8377, "step": 103 },
    { "epoch": 0.5580147551978538, "grad_norm": 0.40840919093725575, "learning_rate": 4.745036402880999e-05, "loss": 1.8397, "step": 104 },
    { "epoch": 0.5633802816901409, "grad_norm": 0.4023593789216688, "learning_rate": 4.738489648394373e-05, "loss": 1.9389, "step": 105 },
    { "epoch": 0.5687458081824279, "grad_norm": 0.45139851873493203, "learning_rate": 4.731864539302531e-05, "loss": 1.9384, "step": 106 },
    { "epoch": 0.574111334674715, "grad_norm": 0.518973698127367, "learning_rate": 4.725161307506391e-05, "loss": 1.8388, "step": 107 },
    { "epoch": 0.579476861167002, "grad_norm": 0.41440109186821383, "learning_rate": 4.7183801876414294e-05, "loss": 1.9807, "step": 108 },
    { "epoch": 0.5848423876592891, "grad_norm": 0.485066583270121, "learning_rate": 4.711521417069462e-05, "loss": 1.9518, "step": 109 },
    { "epoch": 0.5902079141515761, "grad_norm": 0.453283667200083, "learning_rate": 4.70458523587034e-05, "loss": 1.8874, "step": 110 },
    { "epoch": 0.5955734406438632, "grad_norm": 0.40638037542695366, "learning_rate": 4.697571886833544e-05, "loss": 1.8138, "step": 111 },
    { "epoch": 0.6009389671361502, "grad_norm": 0.47718260435002885, "learning_rate": 4.6904816154496854e-05, "loss": 1.8409, "step": 112 },
    { "epoch": 0.6063044936284373, "grad_norm": 0.45172842172725275, "learning_rate": 4.683314669901918e-05, "loss": 1.9261, "step": 113 },
    { "epoch": 0.6116700201207244, "grad_norm": 0.4713867113135493, "learning_rate": 4.676071301057243e-05, "loss": 1.8502, "step": 114 },
    { "epoch": 0.6170355466130114, "grad_norm": 0.42451831321750794, "learning_rate": 4.668751762457734e-05, "loss": 1.8489, "step": 115 },
    { "epoch": 0.6224010731052985, "grad_norm": 0.3958564758105134, "learning_rate": 4.6613563103116594e-05, "loss": 1.8922, "step": 116 },
    { "epoch": 0.6277665995975855, "grad_norm": 0.4009292301802374, "learning_rate": 4.653885203484515e-05, "loss": 1.8685, "step": 117 },
    { "epoch": 0.6331321260898726, "grad_norm": 0.37598195515170946, "learning_rate": 4.6463387034899645e-05, "loss": 1.7889, "step": 118 },
    { "epoch": 0.6384976525821596, "grad_norm": 0.4710597532282601, "learning_rate": 4.638717074480682e-05, "loss": 1.8739, "step": 119 },
    { "epoch": 0.6438631790744467, "grad_norm": 0.4146663267990981, "learning_rate": 4.631020583239107e-05, "loss": 1.8988, "step": 120 },
    { "epoch": 0.6492287055667337, "grad_norm": 0.4354294422619394, "learning_rate": 4.6232494991681094e-05, "loss": 1.8517, "step": 121 },
    { "epoch": 0.6545942320590208, "grad_norm": 0.5470562094587934, "learning_rate": 4.615404094281554e-05, "loss": 1.812, "step": 122 },
    { "epoch": 0.6599597585513078, "grad_norm": 0.37147901827060426, "learning_rate": 4.607484643194788e-05, "loss": 1.8447, "step": 123 },
    { "epoch": 0.6653252850435949, "grad_norm": 0.462225949803499, "learning_rate": 4.599491423115014e-05, "loss": 1.8442, "step": 124 },
    { "epoch": 0.670690811535882, "grad_norm": 0.4600536423365438, "learning_rate": 4.5914247138316025e-05, "loss": 1.926, "step": 125 },
    { "epoch": 0.676056338028169, "grad_norm": 0.4049920788749757, "learning_rate": 4.5832847977062874e-05, "loss": 1.8001, "step": 126 },
    { "epoch": 0.681421864520456, "grad_norm": 0.4052262687972146, "learning_rate": 4.5750719596632885e-05, "loss": 1.8616, "step": 127 },
    { "epoch": 0.6867873910127431, "grad_norm": 0.455611587260568, "learning_rate": 4.5667864871793345e-05, "loss": 1.7792, "step": 128 },
    { "epoch": 0.6921529175050302, "grad_norm": 0.4321930788528261, "learning_rate": 4.558428670273601e-05, "loss": 1.911, "step": 129 },
    { "epoch": 0.6975184439973172, "grad_norm": 0.3922798652619637, "learning_rate": 4.549998801497564e-05, "loss": 1.8979, "step": 130 },
    { "epoch": 0.7028839704896043, "grad_norm": 0.4338149926251678, "learning_rate": 4.54149717592475e-05, "loss": 1.793, "step": 131 },
    { "epoch": 0.7082494969818913, "grad_norm": 0.42943405977885124, "learning_rate": 4.532924091140417e-05, "loss": 1.8606, "step": 132 },
    { "epoch": 0.7136150234741784, "grad_norm": 0.4145321079739444, "learning_rate": 4.524279847231131e-05, "loss": 1.906, "step": 133 },
    { "epoch": 0.7189805499664654, "grad_norm": 0.38959038753607894, "learning_rate": 4.515564746774265e-05, "loss": 1.7323, "step": 134 },
    { "epoch": 0.7243460764587525, "grad_norm": 0.49146581928185773, "learning_rate": 4.5067790948274094e-05, "loss": 1.8303, "step": 135 },
    { "epoch": 0.7297116029510395, "grad_norm": 0.4638570517033698, "learning_rate": 4.49792319891769e-05, "loss": 1.8243, "step": 136 },
    { "epoch": 0.7350771294433266, "grad_norm": 0.38160033679573113, "learning_rate": 4.4889973690310085e-05, "loss": 1.8186, "step": 137 },
    { "epoch": 0.7404426559356136, "grad_norm": 0.5253837731077495, "learning_rate": 4.480001917601185e-05, "loss": 1.9086, "step": 138 },
    { "epoch": 0.7458081824279007, "grad_norm": 0.5010255463058967, "learning_rate": 4.470937159499029e-05, "loss": 1.9014, "step": 139 },
    { "epoch": 0.7511737089201878, "grad_norm": 0.44408252888412136, "learning_rate": 4.461803412021314e-05, "loss": 1.886, "step": 140 },
    { "epoch": 0.7565392354124748, "grad_norm": 0.5189286834612553, "learning_rate": 4.4526009948796703e-05, "loss": 1.8266, "step": 141 },
    { "epoch": 0.7565392354124748, "eval_loss": 2.1618716716766357, "eval_runtime": 403.938, "eval_samples_per_second": 6.31, "eval_steps_per_second": 0.79, "step": 141 },
    { "epoch": 0.7619047619047619, "grad_norm": 0.4334524920814895, "learning_rate": 4.4433302301893987e-05, "loss": 1.8863, "step": 142 },
    { "epoch": 0.7672702883970489, "grad_norm": 0.4809144135248221, "learning_rate": 4.433991442458188e-05, "loss": 1.9075, "step": 143 },
    { "epoch": 0.772635814889336, "grad_norm": 0.5258910256081301, "learning_rate": 4.4245849585747654e-05, "loss": 1.9616, "step": 144 },
    { "epoch": 0.778001341381623, "grad_norm": 0.5131536872616902, "learning_rate": 4.415111107797445e-05, "loss": 1.8953, "step": 145 },
    { "epoch": 0.7833668678739101, "grad_norm": 0.45216150555891305, "learning_rate": 4.4055702217426084e-05, "loss": 1.8867, "step": 146 },
    { "epoch": 0.7887323943661971, "grad_norm": 0.47907057676705184, "learning_rate": 4.395962634373097e-05, "loss": 1.8335, "step": 147 },
    { "epoch": 0.7940979208584842, "grad_norm": 0.4945502094720473, "learning_rate": 4.386288681986516e-05, "loss": 1.9218, "step": 148 },
    { "epoch": 0.7994634473507712, "grad_norm": 0.4595838282230734, "learning_rate": 4.376548703203474e-05, "loss": 1.8603, "step": 149 },
    { "epoch": 0.8048289738430584, "grad_norm": 0.45448249598162854, "learning_rate": 4.36674303895572e-05, "loss": 1.8615, "step": 150 },
    { "epoch": 0.8101945003353455, "grad_norm": 0.4238860646236377, "learning_rate": 4.356872032474213e-05, "loss": 1.8374, "step": 151 },
    { "epoch": 0.8155600268276325, "grad_norm": 0.4519762198634726, "learning_rate": 4.34693602927711e-05, "loss": 1.8174, "step": 152 },
    { "epoch": 0.8209255533199196, "grad_norm": 0.45624612554196736, "learning_rate": 4.336935377157668e-05, "loss": 1.9105, "step": 153 },
    { "epoch": 0.8262910798122066, "grad_norm": 0.4076679531263349, "learning_rate": 4.326870426172075e-05, "loss": 1.8865, "step": 154 },
    { "epoch": 0.8316566063044937, "grad_norm": 0.4514412896189259, "learning_rate": 4.3167415286271905e-05, "loss": 1.9272, "step": 155 },
    { "epoch": 0.8370221327967807, "grad_norm": 0.40433175857719245, "learning_rate": 4.3065490390682186e-05, "loss": 1.814, "step": 156 },
    { "epoch": 0.8423876592890678, "grad_norm": 0.4159285646615437, "learning_rate": 4.296293314266294e-05, "loss": 1.8064, "step": 157 },
    { "epoch": 0.8477531857813548, "grad_norm": 0.4270220599999327, "learning_rate": 4.2859747132060006e-05, "loss": 1.7482, "step": 158 },
    { "epoch": 0.8531187122736419, "grad_norm": 0.5162993182758251, "learning_rate": 4.275593597072796e-05, "loss": 1.7763, "step": 159 },
    { "epoch": 0.8584842387659289, "grad_norm": 0.4065368416357515, "learning_rate": 4.265150329240376e-05, "loss": 1.885, "step": 160 },
    { "epoch": 0.863849765258216, "grad_norm": 0.5523980888304064, "learning_rate": 4.2546452752579536e-05, "loss": 1.971, "step": 161 },
    { "epoch": 0.869215291750503, "grad_norm": 0.4121779650999884, "learning_rate": 4.2440788028374624e-05, "loss": 1.9301, "step": 162 },
    { "epoch": 0.8745808182427901, "grad_norm": 0.4612759438023989, "learning_rate": 4.233451281840686e-05, "loss": 1.8564, "step": 163 },
    { "epoch": 0.8799463447350772, "grad_norm": 0.3977527412707747, "learning_rate": 4.2227630842663136e-05, "loss": 1.8876, "step": 164 },
    { "epoch": 0.8853118712273642, "grad_norm": 0.45276964005664955, "learning_rate": 4.212014584236914e-05, "loss": 1.8098, "step": 165 },
    { "epoch": 0.8906773977196513, "grad_norm": 0.37349476483277844, "learning_rate": 4.2012061579858465e-05, "loss": 1.8247, "step": 166 },
    { "epoch": 0.8960429242119383, "grad_norm": 0.39676844111499676, "learning_rate": 4.190338183844086e-05, "loss": 1.914, "step": 167 },
    { "epoch": 0.9014084507042254, "grad_norm": 0.3625691966854791, "learning_rate": 4.1794110422269825e-05, "loss": 1.92, "step": 168 },
    { "epoch": 0.9067739771965124, "grad_norm": 0.39688352111910685, "learning_rate": 4.168425115620944e-05, "loss": 1.8103, "step": 169 },
    { "epoch": 0.9121395036887995, "grad_norm": 0.3737222783168723, "learning_rate": 4.157380788570053e-05, "loss": 1.8215, "step": 170 },
    { "epoch": 0.9175050301810865, "grad_norm": 0.43842445138079555, "learning_rate": 4.146278447662597e-05, "loss": 1.8029, "step": 171 },
    { "epoch": 0.9228705566733736, "grad_norm": 0.47414715687042996, "learning_rate": 4.1351184815175456e-05, "loss": 1.8974, "step": 172 },
    { "epoch": 0.9282360831656606, "grad_norm": 0.4146539941665616, "learning_rate": 4.123901280770945e-05, "loss": 1.8871, "step": 173 },
    { "epoch": 0.9336016096579477, "grad_norm": 0.4088656394439229, "learning_rate": 4.112627238062239e-05, "loss": 1.8594, "step": 174 },
    { "epoch": 0.9389671361502347, "grad_norm": 0.40239314445747604, "learning_rate": 4.101296748020533e-05, "loss": 1.8207, "step": 175 },
    { "epoch": 0.9443326626425218, "grad_norm": 0.36389736696587754, "learning_rate": 4.089910207250778e-05, "loss": 1.8423, "step": 176 },
    { "epoch": 0.9496981891348089, "grad_norm": 0.41161031740209597, "learning_rate": 4.0784680143198836e-05, "loss": 1.8954, "step": 177 },
    { "epoch": 0.9550637156270959, "grad_norm": 0.3613771597005119, "learning_rate": 4.0669705697427754e-05, "loss": 1.9036, "step": 178 },
    { "epoch": 0.960429242119383, "grad_norm": 0.36880689305175635, "learning_rate": 4.055418275968368e-05, "loss": 1.8676, "step": 179 },
    { "epoch": 0.96579476861167, "grad_norm": 0.3892009514188599, "learning_rate": 4.04381153736548e-05, "loss": 1.8545, "step": 180 },
    { "epoch": 0.9711602951039571, "grad_norm": 0.39386413560247385, "learning_rate": 4.032150760208684e-05, "loss": 1.9306, "step": 181 },
    { "epoch": 0.9765258215962441, "grad_norm": 0.38085985815329654, "learning_rate": 4.02043635266408e-05, "loss": 1.9339, "step": 182 },
    { "epoch": 0.9818913480885312, "grad_norm": 0.3502551322824309, "learning_rate": 4.00866872477501e-05, "loss": 1.8438, "step": 183 },
    { "epoch": 0.9872568745808182, "grad_norm": 0.42923155491093273, "learning_rate": 3.9968482884477075e-05, "loss": 1.8326, "step": 184 },
    { "epoch": 0.9926224010731053, "grad_norm": 0.37629854526288287, "learning_rate": 3.9849754574368766e-05, "loss": 1.8521, "step": 185 },
    { "epoch": 0.9979879275653923, "grad_norm": 0.43648587047407056, "learning_rate": 3.973050647331209e-05, "loss": 1.9192, "step": 186 },
    { "epoch": 1.0053655264922872, "grad_norm": 1.1037688215019685, "learning_rate": 3.9610742755388406e-05, "loss": 3.5179, "step": 187 },
    { "epoch": 1.010731052984574, "grad_norm": 0.5315167109100908, "learning_rate": 3.949046761272736e-05, "loss": 1.5997, "step": 188 },
    { "epoch": 1.010731052984574, "eval_loss": 2.160255193710327, "eval_runtime": 403.1754, "eval_samples_per_second": 6.322, "eval_steps_per_second": 0.791, "step": 188 },
    { "epoch": 1.0160965794768613, "grad_norm": 1.034163349005432, "learning_rate": 3.9369685255360175e-05, "loss": 1.6376, "step": 189 },
    { "epoch": 1.0214621059691482, "grad_norm": 0.5297370695526916, "learning_rate": 3.924839991107229e-05, "loss": 1.5196, "step": 190 },
    { "epoch": 1.0268276324614354, "grad_norm": 0.49978902813452, "learning_rate": 3.9126615825255364e-05, "loss": 1.4628, "step": 191 },
    { "epoch": 1.0321931589537223, "grad_norm": 0.5207450843338, "learning_rate": 3.900433726075865e-05, "loss": 1.5808, "step": 192 },
    { "epoch": 1.0375586854460095, "grad_norm": 0.5502855697944308, "learning_rate": 3.888156849773985e-05, "loss": 1.5445, "step": 193 },
    { "epoch": 1.0429242119382964, "grad_norm": 0.5175112784168553, "learning_rate": 3.875831383351519e-05, "loss": 1.6456, "step": 194 },
    { "epoch": 1.0482897384305836, "grad_norm": 0.4649959629509902, "learning_rate": 3.863457758240912e-05, "loss": 1.5508, "step": 195 },
    { "epoch": 1.0536552649228705, "grad_norm": 0.48136239187851326, "learning_rate": 3.851036407560319e-05, "loss": 1.4848, "step": 196 },
    { "epoch": 1.0590207914151577, "grad_norm": 0.48389312973032594, "learning_rate": 3.838567766098452e-05, "loss": 1.5123, "step": 197 },
    { "epoch": 1.0643863179074446, "grad_norm": 0.45180450785183063, "learning_rate": 3.826052270299356e-05, "loss": 1.5807, "step": 198 },
    { "epoch": 1.0697518443997318, "grad_norm": 0.48079670948977926, "learning_rate": 3.813490358247137e-05, "loss": 1.5426, "step": 199 },
    { "epoch": 1.0751173708920188, "grad_norm": 0.510291013526601, "learning_rate": 3.800882469650621e-05, "loss": 1.5845, "step": 200 },
    { "epoch": 1.080482897384306, "grad_norm": 0.41649615075440893, "learning_rate": 3.78822904582797e-05, "loss": 1.5003, "step": 201 },
    { "epoch": 1.0858484238765929, "grad_norm": 0.4499681852447958, "learning_rate": 3.7755305296912276e-05, "loss": 1.4751, "step": 202 },
    { "epoch": 1.09121395036888, "grad_norm": 0.5088284534955508, "learning_rate": 3.762787365730821e-05, "loss": 1.5982, "step": 203 },
    { "epoch": 1.096579476861167, "grad_norm": 0.3929831451008196, "learning_rate": 3.7500000000000003e-05, "loss": 1.4991, "step": 204 },
    { "epoch": 1.1019450033534541, "grad_norm": 0.41107315409379536, "learning_rate": 3.7371688800992235e-05, "loss": 1.4837, "step": 205 },
    { "epoch": 1.107310529845741, "grad_norm": 0.415588978346341, "learning_rate": 3.7242944551604914e-05, "loss": 1.5345, "step": 206 },
    { "epoch": 1.1126760563380282, "grad_norm": 0.4044749735529877, "learning_rate": 3.711377175831626e-05, "loss": 1.4545, "step": 207 },
    { "epoch": 1.1180415828303152, "grad_norm": 0.44865460732176465, "learning_rate": 3.698417494260494e-05, "loss": 1.6391, "step": 208 },
    { "epoch": 1.1234071093226023, "grad_norm": 0.4479424002899852, "learning_rate": 3.685415864079185e-05, "loss": 1.508, "step": 209 },
    { "epoch": 1.1287726358148893, "grad_norm": 0.429927217929734, "learning_rate": 3.6723727403881284e-05, "loss": 1.5501, "step": 210 },
    { "epoch": 1.1341381623071765, "grad_norm": 0.5044115066105113, "learning_rate": 3.659288579740163e-05, "loss": 1.5615, "step": 211 },
    { "epoch": 1.1395036887994634, "grad_norm": 0.424575600191598, "learning_rate": 3.646163840124561e-05, "loss": 1.4835, "step": 212 },
    { "epoch": 1.1448692152917506, "grad_norm": 0.43480586168977786, "learning_rate": 3.632998980950993e-05, "loss": 1.5371, "step": 213 },
    { "epoch": 1.1502347417840375, "grad_norm": 0.40322150224409975, "learning_rate": 3.619794463033447e-05, "loss": 1.5376, "step": 214 },
    { "epoch": 1.1556002682763247, "grad_norm": 0.42637706485283583, "learning_rate": 3.6065507485741e-05, "loss": 1.5631, "step": 215 },
    { "epoch": 1.1609657947686116, "grad_norm": 0.4259143981435151, "learning_rate": 3.593268301147139e-05, "loss": 1.5741, "step": 216 },
    { "epoch": 1.1663313212608988, "grad_norm": 0.45826011738708783, "learning_rate": 3.5799475856825326e-05, "loss": 1.5298, "step": 217 },
    { "epoch": 1.1716968477531857, "grad_norm": 0.4030875431169561, "learning_rate": 3.566589068449761e-05, "loss": 1.5574, "step": 218 },
    { "epoch": 1.1770623742454729, "grad_norm": 0.409604426981432, "learning_rate": 3.5531932170414896e-05, "loss": 1.4859, "step": 219 },
    { "epoch": 1.1824279007377598, "grad_norm": 0.37439697455127857, "learning_rate": 3.539760500357207e-05, "loss": 1.5221, "step": 220 },
    { "epoch": 1.187793427230047, "grad_norm": 0.3600455315943667, "learning_rate": 3.5262913885868066e-05, "loss": 1.479, "step": 221 },
    { "epoch": 1.193158953722334, "grad_norm": 0.39515964344444054, "learning_rate": 3.512786353194134e-05, "loss": 1.6002, "step": 222 },
    { "epoch": 1.198524480214621, "grad_norm": 0.3903215407425732, "learning_rate": 3.49924586690048e-05, "loss": 1.5627, "step": 223 },
    { "epoch": 1.203890006706908, "grad_norm": 0.3863928913911809, "learning_rate": 3.485670403668036e-05, "loss": 1.4894, "step": 224 },
    { "epoch": 1.2092555331991952, "grad_norm": 0.39120322752660747, "learning_rate": 3.472060438683302e-05, "loss": 1.5576, "step": 225 },
    { "epoch": 1.2146210596914822, "grad_norm": 0.4012276022035902, "learning_rate": 3.4584164483404544e-05, "loss": 1.5661, "step": 226 },
    { "epoch": 1.2199865861837693, "grad_norm": 0.359333036872216, "learning_rate": 3.444738910224671e-05, "loss": 1.6043, "step": 227 },
    { "epoch": 1.2253521126760563, "grad_norm": 0.4169926695855886, "learning_rate": 3.431028303095415e-05, "loss": 1.5162, "step": 228 },
    { "epoch": 1.2307176391683434, "grad_norm": 0.3666243644496504, "learning_rate": 3.417285106869673e-05, "loss": 1.5182, "step": 229 },
    { "epoch": 1.2360831656606304, "grad_norm": 0.3732640706992433, "learning_rate": 3.403509802605159e-05, "loss": 1.6136, "step": 230 },
    { "epoch": 1.2414486921529175, "grad_norm": 0.38533992909636827, "learning_rate": 3.389702872483477e-05, "loss": 1.5763, "step": 231 },
    { "epoch": 1.2468142186452045, "grad_norm": 0.3621197211070038, "learning_rate": 3.3758647997932417e-05, "loss": 1.5977, "step": 232 },
    { "epoch": 1.2521797451374916, "grad_norm": 0.39624430912627595, "learning_rate": 3.361996068913159e-05, "loss": 1.5794, "step": 233 },
    { "epoch": 1.2575452716297786, "grad_norm": 0.38404789088021773, "learning_rate": 3.348097165295076e-05, "loss": 1.5633, "step": 234 },
    { "epoch": 1.2629107981220657, "grad_norm": 0.35832957307552504, "learning_rate": 3.3341685754469856e-05, "loss": 1.5134, "step": 235 },
    { "epoch": 1.2629107981220657, "eval_loss": 2.15519642829895, "eval_runtime": 403.4218, "eval_samples_per_second": 6.318, "eval_steps_per_second": 0.791, "step": 235 },
    { "epoch": 1.268276324614353, "grad_norm": 0.34366404066845024, "learning_rate": 3.320210786915997e-05, "loss": 1.4825, "step": 236 },
    { "epoch": 1.2736418511066399, "grad_norm": 0.45636968737684863, "learning_rate": 3.3062242882712724e-05, "loss": 1.5146, "step": 237 },
    { "epoch": 1.2790073775989268, "grad_norm": 0.3804000236233779, "learning_rate": 3.2922095690869224e-05, "loss": 1.5501, "step": 238 },
    { "epoch": 1.284372904091214, "grad_norm": 0.3699794774550946, "learning_rate": 3.278167119924872e-05, "loss": 1.4956, "step": 239 },
    { "epoch": 1.2897384305835011, "grad_norm": 0.38134052511434596, "learning_rate": 3.2640974323176846e-05, "loss": 1.4926, "step": 240 },
    { "epoch": 1.295103957075788, "grad_norm": 0.4001577679575819, "learning_rate": 3.2500009987513655e-05, "loss": 1.5339, "step": 241 },
    { "epoch": 1.300469483568075, "grad_norm": 0.33599496371938614, "learning_rate": 3.235878312648112e-05, "loss": 1.3329, "step": 242 },
    { "epoch": 1.3058350100603622, "grad_norm": 0.3780832169945631, "learning_rate": 3.2217298683490525e-05, "loss": 1.5711, "step": 243 },
    { "epoch": 1.3112005365526493, "grad_norm": 0.37979912787014874, "learning_rate": 3.207556161096935e-05, "loss": 1.559, "step": 244 },
    { "epoch": 1.3165660630449363, "grad_norm": 0.34013678855872914, "learning_rate": 3.193357687018798e-05, "loss": 1.5112, "step": 245 },
    { "epoch": 1.3219315895372232, "grad_norm": 0.3761770494247479, "learning_rate": 3.179134943108597e-05, "loss": 1.5195, "step": 246 },
    { "epoch": 1.3272971160295104, "grad_norm": 0.36046502801340735, "learning_rate": 3.164888427209818e-05, "loss": 1.4648, "step": 247 },
    { "epoch": 1.3326626425217976, "grad_norm": 0.4163102856970103, "learning_rate": 3.150618637998041e-05, "loss": 1.6488, "step": 248 },
    { "epoch": 1.3380281690140845, "grad_norm": 0.37250576413738173, "learning_rate": 3.136326074963494e-05, "loss": 1.5966, "step": 249 },
    { "epoch": 1.3433936955063714, "grad_norm": 0.3717314773044194, "learning_rate": 3.122011238393562e-05, "loss": 1.4555, "step": 250 },
    { "epoch": 1.3487592219986586, "grad_norm": 0.4233051858413458, "learning_rate": 3.1076746293552786e-05, "loss": 1.5931, "step": 251 },
    { "epoch": 1.3541247484909458, "grad_norm": 0.3538900779699424, "learning_rate": 3.093316749677788e-05, "loss": 1.5041, "step": 252 },
    { "epoch": 1.3594902749832327, "grad_norm": 0.4239252371757693, "learning_rate": 3.078938101934773e-05, "loss": 1.5986, "step": 253 },
    { "epoch": 1.3648558014755197, "grad_norm": 0.40455165331708126, "learning_rate": 3.064539189426874e-05, "loss": 1.5956, "step": 254 },
    { "epoch": 1.3702213279678068, "grad_norm": 0.35682161127333073, "learning_rate": 3.050120516164062e-05, "loss": 1.4983, "step": 255 },
    { "epoch": 1.375586854460094, "grad_norm": 0.366685178903664, "learning_rate": 3.0356825868480017e-05, "loss": 1.6384, "step": 256 },
    { "epoch": 1.380952380952381, "grad_norm": 0.3615423341669086, "learning_rate": 3.0212259068543837e-05, "loss": 1.4734, "step": 257 },
    { "epoch": 1.3863179074446679, "grad_norm": 0.3469703962175405, "learning_rate": 3.006750982215234e-05, "loss": 1.4386, "step": 258 },
    { "epoch": 1.391683433936955, "grad_norm": 0.3757992095280783, "learning_rate": 2.9922583196012037e-05, "loss": 1.5479, "step": 259 },
    { "epoch": 1.3970489604292422, "grad_norm": 0.3696529318558144, "learning_rate": 2.9777484263038306e-05, "loss": 1.4613, "step": 260 },
    { "epoch": 1.4024144869215291, "grad_norm": 0.36763443681114544, "learning_rate": 2.9632218102177862e-05, "loss": 1.4707, "step": 261 },
    { "epoch": 1.4077800134138163, "grad_norm": 0.3330227260573098, "learning_rate": 2.9486789798230917e-05, "loss": 1.5196, "step": 262 },
    { "epoch": 1.4131455399061033, "grad_norm": 0.3401075705229897, "learning_rate": 2.9341204441673266e-05, "loss": 1.5713, "step": 263 },
    { "epoch": 1.4185110663983904, "grad_norm": 0.38589937071982083, "learning_rate": 2.9195467128478044e-05, "loss": 1.5658, "step": 264 },
    { "epoch": 1.4238765928906774, "grad_norm": 0.36187499541792045, "learning_rate": 2.9049582959937392e-05, "loss": 1.5645, "step": 265 },
    { "epoch": 1.4292421193829645, "grad_norm": 0.3727884692802974, "learning_rate": 2.8903557042483887e-05, "loss": 1.5195, "step": 266 },
    { "epoch": 1.4346076458752515, "grad_norm": 0.3423144867083204, "learning_rate": 2.875739448751176e-05, "loss": 1.4897, "step": 267 },
    { "epoch": 1.4399731723675386, "grad_norm": 0.3809610047831763, "learning_rate": 2.8611100411198037e-05, "loss": 1.6107, "step": 268 },
    { "epoch": 1.4453386988598256, "grad_norm": 0.3466660038008262, "learning_rate": 2.8464679934323424e-05, "loss": 1.5203, "step": 269 },
    { "epoch": 1.4507042253521127, "grad_norm": 0.3858219936970091, "learning_rate": 2.8318138182093052e-05, "loss": 1.5211, "step": 270 },
    { "epoch": 1.4560697518443997, "grad_norm": 0.3457606288538911, "learning_rate": 2.8171480283957118e-05, "loss": 1.4825, "step": 271 },
    { "epoch": 1.4614352783366868, "grad_norm": 0.4097169980235964, "learning_rate": 2.80247113734313e-05, "loss": 1.5276, "step": 272 },
    { "epoch": 1.4668008048289738, "grad_norm": 0.39937404860513825, "learning_rate": 2.7877836587917072e-05, "loss": 1.5022, "step": 273 },
    { "epoch": 1.472166331321261, "grad_norm": 0.36070057912441533, "learning_rate": 2.773086106852192e-05, "loss": 1.4587, "step": 274 },
    { "epoch": 1.477531857813548, "grad_norm": 0.4013616820391148, "learning_rate": 2.7583789959879303e-05, "loss": 1.5908, "step": 275 },
    { "epoch": 1.482897384305835, "grad_norm": 0.4150531718746046, "learning_rate": 2.7436628409968664e-05, "loss": 1.4511, "step": 276 },
    { "epoch": 1.488262910798122, "grad_norm": 0.38578622280087393, "learning_rate": 2.728938156993517e-05, "loss": 1.5407, "step": 277 },
    { "epoch": 1.4936284372904092, "grad_norm": 0.3844783258031986, "learning_rate": 2.7142054593909422e-05, "loss": 1.5349, "step": 278 },
    { "epoch": 1.4989939637826961, "grad_norm": 0.4144431176495637, "learning_rate": 2.6994652638827078e-05, "loss": 1.602, "step": 279 },
    { "epoch": 1.5043594902749833, "grad_norm": 0.402711914294424, "learning_rate": 2.6847180864248283e-05, "loss": 1.5902, "step": 280 },
    { "epoch": 1.5097250167672702, "grad_norm": 0.3378799881196754, "learning_rate": 2.6699644432177112e-05, "loss": 1.5514, "step": 281 },
    { "epoch": 1.5150905432595574, "grad_norm": 0.4113537256498264, "learning_rate": 2.655204850688085e-05, "loss": 1.4614, "step": 282 },
    { "epoch": 1.5150905432595574, "eval_loss": 2.1373159885406494, "eval_runtime": 403.7482, "eval_samples_per_second": 6.313, "eval_steps_per_second": 0.79, "step": 282 },
    { "epoch": 1.5204560697518446, "grad_norm": 0.3645339916373801, "learning_rate": 2.6404398254709284e-05, "loss": 1.4716, "step": 283 },
    { "epoch": 1.5258215962441315, "grad_norm": 0.3662916121671904, "learning_rate": 2.625669884391377e-05, "loss": 1.5145, "step": 284 },
    { "epoch": 1.5311871227364184, "grad_norm": 0.4062086365442157, "learning_rate": 2.610895544446641e-05, "loss": 1.5513, "step": 285 },
    { "epoch": 1.5365526492287056, "grad_norm": 0.3693755738805308, "learning_rate": 2.596117322787907e-05, "loss": 1.5498, "step": 286 },
    { "epoch": 1.5419181757209928, "grad_norm": 0.3589026792727439, "learning_rate": 2.5813357367022305e-05, "loss": 1.5211, "step": 287 },
    { "epoch": 1.5472837022132797, "grad_norm": 0.38017444587432575, "learning_rate": 2.566551303594437e-05, "loss": 1.5342, "step": 288 },
    { "epoch": 1.5526492287055667, "grad_norm": 0.3537300073106448, "learning_rate": 2.551764540969005e-05, "loss": 1.5109, "step": 289 },
    { "epoch": 1.5580147551978538, "grad_norm": 0.3752836981822812, "learning_rate": 2.5369759664119537e-05, "loss": 1.5015, "step": 290 },
    { "epoch": 1.563380281690141, "grad_norm": 0.3810876452684417, "learning_rate": 2.5221860975727275e-05, "loss": 1.5686, "step": 291 },
    { "epoch": 1.568745808182428, "grad_norm": 0.3688966465073692, "learning_rate": 2.5073954521460745e-05, "loss": 1.5666, "step": 292 },
    { "epoch": 1.5741113346747149, "grad_norm": 0.3740373962224673, "learning_rate": 2.4926045478539257e-05, "loss": 1.5541, "step": 293 },
    { "epoch": 1.579476861167002, "grad_norm": 0.4415523054571514, "learning_rate": 2.4778139024272724e-05, "loss": 1.62, "step": 294 },
    { "epoch": 1.5848423876592892, "grad_norm": 0.3291219023047839, "learning_rate": 2.4630240335880462e-05, "loss": 1.5, "step": 295 },
    { "epoch": 1.5902079141515761, "grad_norm": 0.39622699819562734, "learning_rate": 2.4482354590309962e-05, "loss": 1.5358, "step": 296 },
    { "epoch": 1.595573440643863, "grad_norm": 0.35147156988608064, "learning_rate": 2.433448696405563e-05, "loss": 1.469, "step": 297 },
    { "epoch": 1.6009389671361502, "grad_norm": 0.3458967842322882, "learning_rate": 2.4186642632977697e-05, "loss": 1.5368, "step": 298 },
    { "epoch": 1.6063044936284374, "grad_norm": 0.45919555248078225, "learning_rate": 2.4038826772120932e-05, "loss": 1.6126, "step": 299 },
    { "epoch": 1.6116700201207244, "grad_norm": 0.397298560652149, "learning_rate": 2.3891044555533588e-05, "loss": 1.5273, "step": 300 },
    { "epoch": 1.6170355466130113, "grad_norm": 0.33950881912700964, "learning_rate": 2.3743301156086244e-05, "loss": 1.5844, "step": 301 },
    { "epoch": 1.6224010731052985, "grad_norm": 0.3685147478174407, "learning_rate": 2.359560174529073e-05, "loss": 1.5306, "step": 302 },
    { "epoch": 1.6277665995975856, "grad_norm": 0.3668847123777601, "learning_rate": 2.3447951493119152e-05, "loss": 1.5114, "step": 303 },
    { "epoch": 1.6331321260898726, "grad_norm": 0.3532238202377609, "learning_rate": 2.3300355567822897e-05, "loss": 1.5568, "step": 304 },
    { "epoch": 1.6384976525821595, "grad_norm": 0.3810645993344603, "learning_rate": 2.3152819135751722e-05, "loss": 1.5121, "step": 305 },
    { "epoch": 1.6438631790744467, "grad_norm": 0.37909917013988087, "learning_rate": 2.300534736117292e-05, "loss": 1.4904, "step": 306 },
    { "epoch": 1.6492287055667338, "grad_norm": 0.38885366059907706, "learning_rate": 2.285794540609058e-05, "loss": 1.5852, "step": 307 },
    { "epoch": 1.6545942320590208, "grad_norm": 0.3657756765079751, "learning_rate": 2.2710618430064843e-05, "loss": 1.5529, "step": 308 },
    { "epoch": 1.6599597585513077, "grad_norm": 0.3397163303578702, "learning_rate": 2.256337159003134e-05, "loss": 1.4903, "step": 309 },
    { "epoch": 1.665325285043595, "grad_norm": 0.39777082501030225, "learning_rate": 2.2416210040120703e-05, "loss": 1.5159, "step": 310 },
    { "epoch": 1.670690811535882, "grad_norm": 0.3658755281773237, "learning_rate": 2.2269138931478084e-05, "loss": 1.5848, "step": 311 },
    { "epoch": 1.676056338028169, "grad_norm": 0.42665798217406753, "learning_rate": 2.2122163412082927e-05, "loss": 1.6133, "step": 312 },
    { "epoch": 1.681421864520456, "grad_norm": 0.40969972656814363, "learning_rate": 2.1975288626568713e-05, "loss": 1.6264, "step": 313 },
    { "epoch": 1.686787391012743, "grad_norm": 0.3283642172163527, "learning_rate": 2.1828519716042888e-05, "loss": 1.4812, "step": 314 },
    { "epoch": 1.6921529175050303, "grad_norm": 0.4058354316614303, "learning_rate": 2.1681861817906954e-05, "loss": 1.4827, "step": 315 },
    { "epoch": 1.6975184439973172, "grad_norm": 0.3466585039953347, "learning_rate": 2.153532006567658e-05, "loss": 1.5768, "step": 316 },
    { "epoch": 1.7028839704896042, "grad_norm": 0.33329283442783797, "learning_rate": 2.1388899588801965e-05, "loss": 1.577, "step": 317 },
    { "epoch": 1.7082494969818913, "grad_norm": 0.3513005805125947, "learning_rate": 2.1242605512488248e-05, "loss": 1.5835, "step": 318 },
    { "epoch": 1.7136150234741785, "grad_norm": 0.3294934417641562, "learning_rate": 2.109644295751612e-05, "loss": 1.4758, "step": 319 },
    { "epoch": 1.7189805499664654, "grad_norm": 0.33415233723527016, "learning_rate": 2.095041704006261e-05, "loss": 1.4853, "step": 320 },
    { "epoch": 1.7243460764587524, "grad_norm": 0.3283653112019817, "learning_rate": 2.080453287152196e-05, "loss": 1.4554, "step": 321 },
    { "epoch": 1.7297116029510395, "grad_norm": 0.32083254638693504, "learning_rate": 2.0658795558326743e-05, "loss": 1.5284, "step": 322 },
    { "epoch": 1.7350771294433267, "grad_norm": 0.37794249417672565, "learning_rate": 2.0513210201769085e-05, "loss": 1.5521, "step": 323 },
    { "epoch": 1.7404426559356136, "grad_norm": 0.3431657733658665, "learning_rate": 2.0367781897822147e-05, "loss": 1.5067, "step": 324 },
    { "epoch": 1.7458081824279006, "grad_norm": 0.33934139214309345, "learning_rate": 2.0222515736961696e-05, "loss": 1.4801, "step": 325 },
    { "epoch": 1.7511737089201878, "grad_norm": 0.3795356143714155, "learning_rate": 2.0077416803987965e-05, "loss": 1.6264, "step": 326 },
    { "epoch": 1.756539235412475, "grad_norm": 0.3507795265759812, "learning_rate": 1.993249017784766e-05, "loss": 1.4967, "step": 327 },
    { "epoch": 1.7619047619047619, "grad_norm": 0.3648489232026123, "learning_rate": 1.9787740931456165e-05, "loss": 1.5561, "step": 328 },
    { "epoch": 1.7672702883970488, "grad_norm": 0.39626816117060937, "learning_rate": 1.9643174131519986e-05, "loss": 1.6362, "step": 329 },
    { "epoch": 1.7672702883970488, "eval_loss": 2.1242423057556152, "eval_runtime": 403.1029, "eval_samples_per_second": 6.323, "eval_steps_per_second": 0.791, "step": 329 },
    { "epoch": 1.772635814889336, "grad_norm": 0.35313890178416246, "learning_rate": 1.949879483835939e-05, "loss": 1.5147, "step": 330 },
    { "epoch": 1.7780013413816231, "grad_norm": 0.3269413977690858, "learning_rate": 1.935460810573127e-05, "loss": 1.5032, "step": 331 },
    { "epoch": 1.78336686787391, "grad_norm": 0.3521215800086969, "learning_rate": 1.9210618980652277e-05, "loss": 1.5734, "step": 332 },
    { "epoch": 1.788732394366197, "grad_norm": 0.3813859592452455, "learning_rate": 1.9066832503222128e-05, "loss": 1.4488, "step": 333 },
    { "epoch": 1.7940979208584842, "grad_norm": 0.31690523060028536, "learning_rate": 1.892325370644721e-05, "loss": 1.432, "step": 334 },
    { "epoch": 1.7994634473507714, "grad_norm": 0.3364023400710833, "learning_rate": 1.8779887616064383e-05, "loss": 1.4871, "step": 335 },
    { "epoch": 1.8048289738430583, "grad_norm": 0.3350534986332929, "learning_rate": 1.863673925036506e-05, "loss": 1.5055, "step": 336 },
    { "epoch": 1.8101945003353455, "grad_norm": 0.33398900975140067, "learning_rate": 1.8493813620019594e-05, "loss": 1.5005, "step": 337 },
    { "epoch": 1.8155600268276326, "grad_norm": 0.31906353499540524, "learning_rate": 1.835111572790183e-05, "loss": 1.4626, "step": 338 },
    { "epoch": 1.8209255533199196, "grad_norm": 0.34716461667960885, "learning_rate": 1.8208650568914033e-05, "loss": 1.509, "step": 339 },
    { "epoch": 1.8262910798122065, "grad_norm": 0.34582309441917697, "learning_rate": 1.8066423129812027e-05, "loss": 1.4824, "step": 340 },
    { "epoch": 1.8316566063044937, "grad_norm": 0.32325626999744855, "learning_rate": 1.792443838903065e-05, "loss": 1.4303, "step": 341 },
    { "epoch": 1.8370221327967808, "grad_norm": 0.32008576746121775, "learning_rate": 1.778270131650948e-05, "loss": 1.6245, "step": 342 },
    { "epoch": 1.8423876592890678, "grad_norm": 0.37457992451894306, "learning_rate": 1.7641216873518878e-05, "loss": 1.4618, "step": 343 },
    { "epoch": 1.8477531857813547, "grad_norm": 0.35750145316045384, "learning_rate": 1.749999001248635e-05, "loss": 1.5959, "step": 344 },
    { "epoch": 1.8531187122736419, "grad_norm": 0.3306845841796733, "learning_rate": 1.735902567682315e-05, "loss": 1.5816, "step": 345 },
    { "epoch": 1.858484238765929, "grad_norm": 0.38970827119176255, "learning_rate": 1.7218328800751288e-05, "loss": 1.5529, "step": 346 },
    { "epoch": 1.863849765258216, "grad_norm": 0.3217477415509899, "learning_rate": 1.7077904309130782e-05, "loss": 1.5559, "step": 347 },
    { "epoch": 1.869215291750503, "grad_norm": 0.36288548537121584, "learning_rate": 1.6937757117287278e-05, "loss": 1.4984, "step": 348 },
    { "epoch": 1.87458081824279, "grad_norm": 0.3460179189739247, "learning_rate": 1.6797892130840036e-05, "loss": 1.5385, "step": 349 },
    { "epoch": 1.8799463447350773, "grad_norm": 0.31554481787674976, "learning_rate": 1.665831424553015e-05, "loss": 1.504, "step": 350 },
    { "epoch": 1.8853118712273642, "grad_norm": 0.30549498763968225, "learning_rate": 1.651902834704924e-05, "loss": 1.5228, "step": 351 },
    { "epoch": 1.8906773977196512, "grad_norm": 0.3277064150411407, "learning_rate": 1.6380039310868416e-05, "loss": 1.4852, "step": 352 },
    { "epoch": 1.8960429242119383, "grad_norm": 0.31968263622304366, "learning_rate": 1.624135200206759e-05, "loss": 1.4611, "step": 353 },
    { "epoch": 1.9014084507042255, "grad_norm": 0.32902733969646153, "learning_rate": 1.6102971275165228e-05, "loss": 1.4833, "step": 354 },
    { "epoch": 1.9067739771965124, "grad_norm": 0.3299428742675132, "learning_rate": 1.596490197394841e-05, "loss": 1.4439, "step": 355 },
    { "epoch": 1.9121395036887994, "grad_norm": 0.34500427705894593, "learning_rate": 1.5827148931303277e-05, "loss": 1.5616, "step": 356 },
    { "epoch": 1.9175050301810865, "grad_norm": 0.32458276939765035, "learning_rate": 1.5689716969045848e-05, "loss": 1.4334, "step": 357 },
    { "epoch": 1.9228705566733737, "grad_norm": 0.3342429982300284, "learning_rate": 1.5552610897753292e-05, "loss": 1.5311, "step": 358 },
    { "epoch": 1.9282360831656606, "grad_norm": 0.3483438918440925, "learning_rate": 1.5415835516595465e-05, "loss": 1.4642, "step": 359 },
    { "epoch": 1.9336016096579476, "grad_norm": 0.324692472057597, "learning_rate": 1.5279395613166986e-05, "loss": 1.5336, "step": 360 },
    { "epoch": 1.9389671361502347, "grad_norm": 0.4011901483013197, "learning_rate": 1.5143295963319643e-05, "loss": 1.5634, "step": 361 },
    { "epoch": 1.944332662642522, "grad_norm": 0.3791986336861211, "learning_rate": 1.5007541330995197e-05, "loss": 1.5722, "step": 362 },
    { "epoch": 1.9496981891348089, "grad_norm": 0.3318324328550346, "learning_rate": 1.4872136468058661e-05, "loss": 1.6214, "step": 363 },
    { "epoch": 1.9550637156270958, "grad_norm": 0.3865281831290857, "learning_rate": 1.4737086114131943e-05, "loss": 1.548, "step": 364 },
    { "epoch": 1.960429242119383, "grad_norm": 0.3317269613634914, "learning_rate": 1.4602394996427942e-05, "loss": 1.5024, "step": 365 },
    { "epoch": 1.9657947686116701, "grad_norm": 0.36387655313468187, "learning_rate": 1.4468067829585108e-05, "loss": 1.5256, "step": 366 },
    { "epoch": 1.971160295103957, "grad_norm": 0.3494371010379711, "learning_rate": 1.4334109315502395e-05, "loss": 1.5559, "step": 367 },
    { "epoch": 1.976525821596244, "grad_norm": 0.36135083308665883, "learning_rate": 1.4200524143174677e-05, "loss": 1.5669, "step": 368 },
    { "epoch": 1.9818913480885312, "grad_norm": 0.34824673781683024, "learning_rate": 1.4067316988528617e-05, "loss": 1.6642, "step": 369 },
    { "epoch": 1.9872568745808183, "grad_norm": 0.36134664903507996, "learning_rate": 1.3934492514259003e-05, "loss": 1.5003, "step": 370 },
    { "epoch": 1.9926224010731053, "grad_norm": 0.3315306163140975, "learning_rate": 1.3802055369665534e-05, "loss": 1.3959, "step": 371 },
    { "epoch": 1.9979879275653922, "grad_norm": 0.3179771784326003, "learning_rate": 1.3670010190490073e-05, "loss": 1.5285, "step": 372 },
    { "epoch": 2.005365526492287, "grad_norm": 0.9915595865019317, "learning_rate": 1.3538361598754384e-05, "loss": 2.456, "step": 373 },
    { "epoch": 2.0107310529845743, "grad_norm": 0.6291566214590382, "learning_rate": 1.3407114202598369e-05, "loss": 1.1708, "step": 374 },
    { "epoch": 2.0160965794768613, "grad_norm": 0.5593895609190137, "learning_rate": 1.327627259611873e-05, "loss": 1.1864, "step": 375 },
    { "epoch": 2.021462105969148, "grad_norm": 0.4205563210949494, "learning_rate": 1.314584135920815e-05, "loss": 1.1374, "step": 376 },
    { "epoch": 2.021462105969148, "eval_loss": 2.232717752456665, "eval_runtime": 403.1787, "eval_samples_per_second": 6.322, "eval_steps_per_second": 0.791, "step": 376 }
  ],
  "logging_steps": 1,
  "max_steps": 558,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 47,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0096298209438597e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}