{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9923838537699923,
  "eval_steps": 500,
  "global_step": 218,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00913937547600914,
      "grad_norm": 5.803379058837891,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.7449,
      "step": 1
    },
    {
      "epoch": 0.01827875095201828,
      "grad_norm": 5.878463268280029,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.7528,
      "step": 2
    },
    {
      "epoch": 0.027418126428027417,
      "grad_norm": 5.657175064086914,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.7296,
      "step": 3
    },
    {
      "epoch": 0.03655750190403656,
      "grad_norm": 5.441561698913574,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.7337,
      "step": 4
    },
    {
      "epoch": 0.0456968773800457,
      "grad_norm": 4.161638259887695,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.7005,
      "step": 5
    },
    {
      "epoch": 0.05483625285605483,
      "grad_norm": 2.691371202468872,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.666,
      "step": 6
    },
    {
      "epoch": 0.06397562833206398,
      "grad_norm": 2.3983771800994873,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.6625,
      "step": 7
    },
    {
      "epoch": 0.07311500380807312,
      "grad_norm": 4.262558460235596,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.6645,
      "step": 8
    },
    {
      "epoch": 0.08225437928408226,
      "grad_norm": 4.646894931793213,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.6624,
      "step": 9
    },
    {
      "epoch": 0.0913937547600914,
      "grad_norm": 3.8496811389923096,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.6186,
      "step": 10
    },
    {
      "epoch": 0.10053313023610053,
      "grad_norm": 4.285327911376953,
      "learning_rate": 1e-05,
      "loss": 0.6059,
      "step": 11
    },
    {
      "epoch": 0.10967250571210967,
      "grad_norm": 3.3189685344696045,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5763,
      "step": 12
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 2.195256233215332,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5776,
      "step": 13
    },
    {
      "epoch": 0.12795125666412796,
      "grad_norm": 2.200354814529419,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.5703,
      "step": 14
    },
    {
      "epoch": 0.1370906321401371,
      "grad_norm": 3.10870099067688,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.5697,
      "step": 15
    },
    {
      "epoch": 0.14623000761614624,
      "grad_norm": 2.311767101287842,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.5148,
      "step": 16
    },
    {
      "epoch": 0.15536938309215537,
      "grad_norm": 1.341518759727478,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.5402,
      "step": 17
    },
    {
      "epoch": 0.16450875856816452,
      "grad_norm": 1.3586676120758057,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.5223,
      "step": 18
    },
    {
      "epoch": 0.17364813404417365,
      "grad_norm": 1.9843041896820068,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.4927,
      "step": 19
    },
    {
      "epoch": 0.1827875095201828,
      "grad_norm": 1.5958917140960693,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.5186,
      "step": 20
    },
    {
      "epoch": 0.19192688499619193,
      "grad_norm": 0.8092266321182251,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4834,
      "step": 21
    },
    {
      "epoch": 0.20106626047220105,
      "grad_norm": 1.3990743160247803,
      "learning_rate": 2e-05,
      "loss": 0.4879,
      "step": 22
    },
    {
      "epoch": 0.2102056359482102,
      "grad_norm": 1.377229928970337,
      "learning_rate": 1.9998715457999313e-05,
      "loss": 0.493,
      "step": 23
    },
    {
      "epoch": 0.21934501142421933,
      "grad_norm": 0.9455161094665527,
      "learning_rate": 1.999486216200688e-05,
      "loss": 0.477,
      "step": 24
    },
    {
      "epoch": 0.2284843869002285,
      "grad_norm": 0.8552016615867615,
      "learning_rate": 1.9988441101966807e-05,
      "loss": 0.4986,
      "step": 25
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 0.9342105984687805,
      "learning_rate": 1.9979453927503366e-05,
      "loss": 0.5122,
      "step": 26
    },
    {
      "epoch": 0.24676313785224677,
      "grad_norm": 0.8206979632377625,
      "learning_rate": 1.9967902947497158e-05,
      "loss": 0.4918,
      "step": 27
    },
    {
      "epoch": 0.2559025133282559,
      "grad_norm": 0.8245558142662048,
      "learning_rate": 1.9953791129491985e-05,
      "loss": 0.4861,
      "step": 28
    },
    {
      "epoch": 0.26504188880426505,
      "grad_norm": 0.6446841359138489,
      "learning_rate": 1.9937122098932428e-05,
      "loss": 0.4633,
      "step": 29
    },
    {
      "epoch": 0.2741812642802742,
      "grad_norm": 0.7485717535018921,
      "learning_rate": 1.991790013823246e-05,
      "loss": 0.4867,
      "step": 30
    },
    {
      "epoch": 0.2833206397562833,
      "grad_norm": 0.8434760570526123,
      "learning_rate": 1.9896130185675263e-05,
      "loss": 0.4833,
      "step": 31
    },
    {
      "epoch": 0.2924600152322925,
      "grad_norm": 0.6975553631782532,
      "learning_rate": 1.9871817834144506e-05,
      "loss": 0.4642,
      "step": 32
    },
    {
      "epoch": 0.3015993907083016,
      "grad_norm": 0.7539258003234863,
      "learning_rate": 1.9844969329687526e-05,
      "loss": 0.4796,
      "step": 33
    },
    {
      "epoch": 0.31073876618431073,
      "grad_norm": 0.661018967628479,
      "learning_rate": 1.9815591569910654e-05,
      "loss": 0.4542,
      "step": 34
    },
    {
      "epoch": 0.31987814166031986,
      "grad_norm": 0.7433336973190308,
      "learning_rate": 1.9783692102207156e-05,
      "loss": 0.4845,
      "step": 35
    },
    {
      "epoch": 0.32901751713632904,
      "grad_norm": 0.6025668382644653,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 0.424,
      "step": 36
    },
    {
      "epoch": 0.33815689261233817,
      "grad_norm": 0.7638101577758789,
      "learning_rate": 1.971236146972764e-05,
      "loss": 0.4267,
      "step": 37
    },
    {
      "epoch": 0.3472962680883473,
      "grad_norm": 0.6455888748168945,
      "learning_rate": 1.9672948630390296e-05,
      "loss": 0.4232,
      "step": 38
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 0.7659748792648315,
      "learning_rate": 1.9631050729295705e-05,
      "loss": 0.4801,
      "step": 39
    },
    {
      "epoch": 0.3655750190403656,
      "grad_norm": 0.7797123193740845,
      "learning_rate": 1.9586678530366607e-05,
      "loss": 0.4514,
      "step": 40
    },
    {
      "epoch": 0.3747143945163747,
      "grad_norm": 0.5871541500091553,
      "learning_rate": 1.953984343319364e-05,
      "loss": 0.433,
      "step": 41
    },
    {
      "epoch": 0.38385376999238385,
      "grad_norm": 0.668601930141449,
      "learning_rate": 1.949055747010669e-05,
      "loss": 0.4583,
      "step": 42
    },
    {
      "epoch": 0.392993145468393,
      "grad_norm": 0.7493565082550049,
      "learning_rate": 1.9438833303083677e-05,
      "loss": 0.459,
      "step": 43
    },
    {
      "epoch": 0.4021325209444021,
      "grad_norm": 0.6386315226554871,
      "learning_rate": 1.9384684220497605e-05,
      "loss": 0.4239,
      "step": 44
    },
    {
      "epoch": 0.4112718964204113,
      "grad_norm": 0.7803364396095276,
      "learning_rate": 1.932812413370265e-05,
      "loss": 0.475,
      "step": 45
    },
    {
      "epoch": 0.4204112718964204,
      "grad_norm": 0.6278897523880005,
      "learning_rate": 1.926916757346022e-05,
      "loss": 0.433,
      "step": 46
    },
    {
      "epoch": 0.42955064737242954,
      "grad_norm": 0.6488973498344421,
      "learning_rate": 1.9207829686205882e-05,
      "loss": 0.4262,
      "step": 47
    },
    {
      "epoch": 0.43869002284843867,
      "grad_norm": 0.7175928950309753,
      "learning_rate": 1.9144126230158127e-05,
      "loss": 0.447,
      "step": 48
    },
    {
      "epoch": 0.44782939832444785,
      "grad_norm": 0.7046955823898315,
      "learning_rate": 1.9078073571269922e-05,
      "loss": 0.4256,
      "step": 49
    },
    {
      "epoch": 0.456968773800457,
      "grad_norm": 0.6800966262817383,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.4295,
      "step": 50
    },
    {
      "epoch": 0.4661081492764661,
      "grad_norm": 0.6840780973434448,
      "learning_rate": 1.8938989122074195e-05,
      "loss": 0.4383,
      "step": 51
    },
    {
      "epoch": 0.4752475247524752,
      "grad_norm": 0.85346519947052,
      "learning_rate": 1.8865993063730003e-05,
      "loss": 0.456,
      "step": 52
    },
    {
      "epoch": 0.4843869002284844,
      "grad_norm": 0.627661943435669,
      "learning_rate": 1.8790719257292175e-05,
      "loss": 0.4598,
      "step": 53
    },
    {
      "epoch": 0.49352627570449353,
      "grad_norm": 0.865216851234436,
      "learning_rate": 1.8713187041233896e-05,
      "loss": 0.4547,
      "step": 54
    },
    {
      "epoch": 0.5026656511805027,
      "grad_norm": 0.7087027430534363,
      "learning_rate": 1.8633416334232754e-05,
      "loss": 0.4261,
      "step": 55
    },
    {
      "epoch": 0.5118050266565118,
      "grad_norm": 0.8220586180686951,
      "learning_rate": 1.8551427630053464e-05,
      "loss": 0.4589,
      "step": 56
    },
    {
      "epoch": 0.5209444021325209,
      "grad_norm": 0.6183983087539673,
      "learning_rate": 1.8467241992282842e-05,
      "loss": 0.4105,
      "step": 57
    },
    {
      "epoch": 0.5300837776085301,
      "grad_norm": 0.7341344952583313,
      "learning_rate": 1.8380881048918406e-05,
      "loss": 0.4435,
      "step": 58
    },
    {
      "epoch": 0.5392231530845393,
      "grad_norm": 0.86666339635849,
      "learning_rate": 1.8292366986811952e-05,
      "loss": 0.4148,
      "step": 59
    },
    {
      "epoch": 0.5483625285605483,
      "grad_norm": 0.6903016567230225,
      "learning_rate": 1.820172254596956e-05,
      "loss": 0.4356,
      "step": 60
    },
    {
      "epoch": 0.5575019040365575,
      "grad_norm": 0.7348374128341675,
      "learning_rate": 1.8108971013709512e-05,
      "loss": 0.419,
      "step": 61
    },
    {
      "epoch": 0.5666412795125666,
      "grad_norm": 0.6142125725746155,
      "learning_rate": 1.8014136218679566e-05,
      "loss": 0.4238,
      "step": 62
    },
    {
      "epoch": 0.5757806549885758,
      "grad_norm": 0.689704954624176,
      "learning_rate": 1.79172425247352e-05,
      "loss": 0.4208,
      "step": 63
    },
    {
      "epoch": 0.584920030464585,
      "grad_norm": 0.6363848447799683,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.4212,
      "step": 64
    },
    {
      "epoch": 0.594059405940594,
      "grad_norm": 0.6809417605400085,
      "learning_rate": 1.771737853387202e-05,
      "loss": 0.4396,
      "step": 65
    },
    {
      "epoch": 0.6031987814166032,
      "grad_norm": 0.6080987453460693,
      "learning_rate": 1.7614459583691346e-05,
      "loss": 0.4359,
      "step": 66
    },
    {
      "epoch": 0.6123381568926123,
      "grad_norm": 0.5947669744491577,
      "learning_rate": 1.7509584414881114e-05,
      "loss": 0.4328,
      "step": 67
    },
    {
      "epoch": 0.6214775323686215,
      "grad_norm": 0.6680779457092285,
      "learning_rate": 1.7402779970753156e-05,
      "loss": 0.4424,
      "step": 68
    },
    {
      "epoch": 0.6306169078446306,
      "grad_norm": 0.5437312722206116,
      "learning_rate": 1.7294073690266343e-05,
      "loss": 0.4445,
      "step": 69
    },
    {
      "epoch": 0.6397562833206397,
      "grad_norm": 0.5836828947067261,
      "learning_rate": 1.7183493500977277e-05,
      "loss": 0.4296,
      "step": 70
    },
    {
      "epoch": 0.6488956587966489,
      "grad_norm": 0.6663245558738708,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.4309,
      "step": 71
    },
    {
      "epoch": 0.6580350342726581,
      "grad_norm": 0.5611643195152283,
      "learning_rate": 1.6956825506034866e-05,
      "loss": 0.4192,
      "step": 72
    },
    {
      "epoch": 0.6671744097486672,
      "grad_norm": 0.63041752576828,
      "learning_rate": 1.6840795933293464e-05,
      "loss": 0.4201,
      "step": 73
    },
    {
      "epoch": 0.6763137852246763,
      "grad_norm": 0.6765708923339844,
      "learning_rate": 1.672300890261317e-05,
      "loss": 0.4121,
      "step": 74
    },
    {
      "epoch": 0.6854531607006854,
      "grad_norm": 0.5874064564704895,
      "learning_rate": 1.6603494674471595e-05,
      "loss": 0.4133,
      "step": 75
    },
    {
      "epoch": 0.6945925361766946,
      "grad_norm": 0.6408722996711731,
      "learning_rate": 1.6482283953077887e-05,
      "loss": 0.4423,
      "step": 76
    },
    {
      "epoch": 0.7037319116527038,
      "grad_norm": 0.6493033170700073,
      "learning_rate": 1.635940787848455e-05,
      "loss": 0.4148,
      "step": 77
    },
    {
      "epoch": 0.7128712871287128,
      "grad_norm": 0.6096062064170837,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.4189,
      "step": 78
    },
    {
      "epoch": 0.722010662604722,
      "grad_norm": 0.639083981513977,
      "learning_rate": 1.6108786361015145e-05,
      "loss": 0.4076,
      "step": 79
    },
    {
      "epoch": 0.7311500380807312,
      "grad_norm": 0.611152708530426,
      "learning_rate": 1.598110530491216e-05,
      "loss": 0.4584,
      "step": 80
    },
    {
      "epoch": 0.7402894135567403,
      "grad_norm": 0.665632426738739,
      "learning_rate": 1.5851887652614238e-05,
      "loss": 0.4325,
      "step": 81
    },
    {
      "epoch": 0.7494287890327495,
      "grad_norm": 0.5669299960136414,
      "learning_rate": 1.5721166601221697e-05,
      "loss": 0.4484,
      "step": 82
    },
    {
      "epoch": 0.7585681645087585,
      "grad_norm": 0.6546853184700012,
      "learning_rate": 1.5588975734070717e-05,
      "loss": 0.412,
      "step": 83
    },
    {
      "epoch": 0.7677075399847677,
      "grad_norm": 0.49923935532569885,
      "learning_rate": 1.5455349012105488e-05,
      "loss": 0.4289,
      "step": 84
    },
    {
      "epoch": 0.7768469154607769,
      "grad_norm": 0.6773508191108704,
      "learning_rate": 1.5320320765153367e-05,
      "loss": 0.4269,
      "step": 85
    },
    {
      "epoch": 0.785986290936786,
      "grad_norm": 0.5767094492912292,
      "learning_rate": 1.5183925683105254e-05,
      "loss": 0.4285,
      "step": 86
    },
    {
      "epoch": 0.7951256664127951,
      "grad_norm": 0.6376355886459351,
      "learning_rate": 1.504619880700346e-05,
      "loss": 0.4258,
      "step": 87
    },
    {
      "epoch": 0.8042650418888042,
      "grad_norm": 0.5988445281982422,
      "learning_rate": 1.4907175520039381e-05,
      "loss": 0.4192,
      "step": 88
    },
    {
      "epoch": 0.8134044173648134,
      "grad_norm": 0.633305013179779,
      "learning_rate": 1.4766891538463255e-05,
      "loss": 0.4166,
      "step": 89
    },
    {
      "epoch": 0.8225437928408226,
      "grad_norm": 0.5571799278259277,
      "learning_rate": 1.4625382902408356e-05,
      "loss": 0.4125,
      "step": 90
    },
    {
      "epoch": 0.8316831683168316,
      "grad_norm": 0.5751418471336365,
      "learning_rate": 1.448268596663197e-05,
      "loss": 0.4044,
      "step": 91
    },
    {
      "epoch": 0.8408225437928408,
      "grad_norm": 0.5511994957923889,
      "learning_rate": 1.4338837391175582e-05,
      "loss": 0.4227,
      "step": 92
    },
    {
      "epoch": 0.84996191926885,
      "grad_norm": 0.5298687219619751,
      "learning_rate": 1.419387413194657e-05,
      "loss": 0.4073,
      "step": 93
    },
    {
      "epoch": 0.8591012947448591,
      "grad_norm": 0.5533376932144165,
      "learning_rate": 1.4047833431223938e-05,
      "loss": 0.4132,
      "step": 94
    },
    {
      "epoch": 0.8682406702208683,
      "grad_norm": 0.5788342356681824,
      "learning_rate": 1.390075280809047e-05,
      "loss": 0.4135,
      "step": 95
    },
    {
      "epoch": 0.8773800456968773,
      "grad_norm": 0.5516043901443481,
      "learning_rate": 1.3752670048793744e-05,
      "loss": 0.4144,
      "step": 96
    },
    {
      "epoch": 0.8865194211728865,
      "grad_norm": 0.5432876348495483,
      "learning_rate": 1.3603623197038536e-05,
      "loss": 0.4211,
      "step": 97
    },
    {
      "epoch": 0.8956587966488957,
      "grad_norm": 0.6421664953231812,
      "learning_rate": 1.3453650544213078e-05,
      "loss": 0.4329,
      "step": 98
    },
    {
      "epoch": 0.9047981721249048,
      "grad_norm": 0.5505258440971375,
      "learning_rate": 1.3302790619551673e-05,
      "loss": 0.4012,
      "step": 99
    },
    {
      "epoch": 0.913937547600914,
      "grad_norm": 0.6094185709953308,
      "learning_rate": 1.315108218023621e-05,
      "loss": 0.4206,
      "step": 100
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.5962590575218201,
      "learning_rate": 1.2998564201439117e-05,
      "loss": 0.4206,
      "step": 101
    },
    {
      "epoch": 0.9322162985529322,
      "grad_norm": 0.576652467250824,
      "learning_rate": 1.2845275866310325e-05,
      "loss": 0.4181,
      "step": 102
    },
    {
      "epoch": 0.9413556740289414,
      "grad_norm": 0.5562915205955505,
      "learning_rate": 1.2691256555910769e-05,
      "loss": 0.4134,
      "step": 103
    },
    {
      "epoch": 0.9504950495049505,
      "grad_norm": 0.5346901416778564,
      "learning_rate": 1.2536545839095074e-05,
      "loss": 0.4363,
      "step": 104
    },
    {
      "epoch": 0.9596344249809596,
      "grad_norm": 0.5291918516159058,
      "learning_rate": 1.2381183462345983e-05,
      "loss": 0.4189,
      "step": 105
    },
    {
      "epoch": 0.9687738004569688,
      "grad_norm": 0.5179046988487244,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.4131,
      "step": 106
    },
    {
      "epoch": 0.9779131759329779,
      "grad_norm": 0.50760418176651,
      "learning_rate": 1.206866354180891e-05,
      "loss": 0.4145,
      "step": 107
    },
    {
      "epoch": 0.9870525514089871,
      "grad_norm": 0.483079195022583,
      "learning_rate": 1.1911586287013726e-05,
      "loss": 0.4208,
      "step": 108
    },
    {
      "epoch": 0.9961919268849961,
      "grad_norm": 0.5721336007118225,
      "learning_rate": 1.1754017929643818e-05,
      "loss": 0.4141,
      "step": 109
    },
    {
      "epoch": 1.0053313023610053,
      "grad_norm": 0.9483687281608582,
      "learning_rate": 1.1595998950333794e-05,
      "loss": 0.5968,
      "step": 110
    },
    {
      "epoch": 1.0144706778370145,
      "grad_norm": 0.6470979452133179,
      "learning_rate": 1.143756994548682e-05,
      "loss": 0.3799,
      "step": 111
    },
    {
      "epoch": 1.0236100533130237,
      "grad_norm": 0.5971588492393494,
      "learning_rate": 1.1278771616845061e-05,
      "loss": 0.3711,
      "step": 112
    },
    {
      "epoch": 1.0327494287890326,
      "grad_norm": 0.5583009123802185,
      "learning_rate": 1.1119644761033079e-05,
      "loss": 0.3657,
      "step": 113
    },
    {
      "epoch": 1.0418888042650418,
      "grad_norm": 0.5742236971855164,
      "learning_rate": 1.0960230259076819e-05,
      "loss": 0.3561,
      "step": 114
    },
    {
      "epoch": 1.051028179741051,
      "grad_norm": 0.6248900294303894,
      "learning_rate": 1.0800569065900935e-05,
      "loss": 0.3861,
      "step": 115
    },
    {
      "epoch": 1.0601675552170602,
      "grad_norm": 0.6363320350646973,
      "learning_rate": 1.064070219980713e-05,
      "loss": 0.3392,
      "step": 116
    },
    {
      "epoch": 1.0693069306930694,
      "grad_norm": 0.5456411838531494,
      "learning_rate": 1.0480670731936209e-05,
      "loss": 0.3361,
      "step": 117
    },
    {
      "epoch": 1.0784463061690786,
      "grad_norm": 0.510452389717102,
      "learning_rate": 1.0320515775716556e-05,
      "loss": 0.3245,
      "step": 118
    },
    {
      "epoch": 1.0875856816450875,
      "grad_norm": 0.5856946110725403,
      "learning_rate": 1.0160278476301739e-05,
      "loss": 0.409,
      "step": 119
    },
    {
      "epoch": 1.0967250571210967,
      "grad_norm": 0.5407898426055908,
      "learning_rate": 1e-05,
      "loss": 0.3577,
      "step": 120
    },
    {
      "epoch": 1.1058644325971059,
      "grad_norm": 0.4385759234428406,
      "learning_rate": 9.839721523698265e-06,
      "loss": 0.2774,
      "step": 121
    },
    {
      "epoch": 1.115003808073115,
      "grad_norm": 0.5878434777259827,
      "learning_rate": 9.67948422428345e-06,
      "loss": 0.3834,
      "step": 122
    },
    {
      "epoch": 1.1241431835491242,
      "grad_norm": 0.5798720121383667,
      "learning_rate": 9.519329268063795e-06,
      "loss": 0.3729,
      "step": 123
    },
    {
      "epoch": 1.1332825590251332,
      "grad_norm": 0.5163602828979492,
      "learning_rate": 9.359297800192873e-06,
      "loss": 0.335,
      "step": 124
    },
    {
      "epoch": 1.1424219345011424,
      "grad_norm": 0.5222927331924438,
      "learning_rate": 9.199430934099068e-06,
      "loss": 0.3317,
      "step": 125
    },
    {
      "epoch": 1.1515613099771516,
      "grad_norm": 0.5279880166053772,
      "learning_rate": 9.039769740923183e-06,
      "loss": 0.3216,
      "step": 126
    },
    {
      "epoch": 1.1607006854531607,
      "grad_norm": 0.5063551068305969,
      "learning_rate": 8.880355238966923e-06,
      "loss": 0.3386,
      "step": 127
    },
    {
      "epoch": 1.16984006092917,
      "grad_norm": 0.5172789692878723,
      "learning_rate": 8.721228383154939e-06,
      "loss": 0.3507,
      "step": 128
    },
    {
      "epoch": 1.178979436405179,
      "grad_norm": 0.49883145093917847,
      "learning_rate": 8.562430054513184e-06,
      "loss": 0.3447,
      "step": 129
    },
    {
      "epoch": 1.188118811881188,
      "grad_norm": 0.503009021282196,
      "learning_rate": 8.404001049666211e-06,
      "loss": 0.348,
      "step": 130
    },
    {
      "epoch": 1.1972581873571972,
      "grad_norm": 0.5342569351196289,
      "learning_rate": 8.245982070356186e-06,
      "loss": 0.3237,
      "step": 131
    },
    {
      "epoch": 1.2063975628332064,
      "grad_norm": 0.49484220147132874,
      "learning_rate": 8.08841371298628e-06,
      "loss": 0.3438,
      "step": 132
    },
    {
      "epoch": 1.2155369383092156,
      "grad_norm": 0.48607689142227173,
      "learning_rate": 7.931336458191092e-06,
      "loss": 0.3744,
      "step": 133
    },
    {
      "epoch": 1.2246763137852246,
      "grad_norm": 0.46569332480430603,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.3359,
      "step": 134
    },
    {
      "epoch": 1.2338156892612338,
      "grad_norm": 0.46949270367622375,
      "learning_rate": 7.618816537654018e-06,
      "loss": 0.3473,
      "step": 135
    },
    {
      "epoch": 1.242955064737243,
      "grad_norm": 0.43653199076652527,
      "learning_rate": 7.463454160904928e-06,
      "loss": 0.3022,
      "step": 136
    },
    {
      "epoch": 1.2520944402132521,
      "grad_norm": 0.5573130249977112,
      "learning_rate": 7.308743444089232e-06,
      "loss": 0.3571,
      "step": 137
    },
    {
      "epoch": 1.2612338156892613,
      "grad_norm": 0.4045010805130005,
      "learning_rate": 7.154724133689677e-06,
      "loss": 0.322,
      "step": 138
    },
    {
      "epoch": 1.2703731911652705,
      "grad_norm": 0.5753940939903259,
      "learning_rate": 7.001435798560884e-06,
      "loss": 0.348,
      "step": 139
    },
    {
      "epoch": 1.2795125666412794,
      "grad_norm": 0.5088015794754028,
      "learning_rate": 6.848917819763794e-06,
      "loss": 0.3265,
      "step": 140
    },
    {
      "epoch": 1.2886519421172886,
      "grad_norm": 0.4818384051322937,
      "learning_rate": 6.697209380448333e-06,
      "loss": 0.3585,
      "step": 141
    },
    {
      "epoch": 1.2977913175932978,
      "grad_norm": 0.5760669708251953,
      "learning_rate": 6.546349455786926e-06,
      "loss": 0.3969,
      "step": 142
    },
    {
      "epoch": 1.306930693069307,
      "grad_norm": 0.46207141876220703,
      "learning_rate": 6.396376802961468e-06,
      "loss": 0.3448,
      "step": 143
    },
    {
      "epoch": 1.316070068545316,
      "grad_norm": 0.423732727766037,
      "learning_rate": 6.24732995120626e-06,
      "loss": 0.3273,
      "step": 144
    },
    {
      "epoch": 1.3252094440213251,
      "grad_norm": 0.42002105712890625,
      "learning_rate": 6.099247191909532e-06,
      "loss": 0.3247,
      "step": 145
    },
    {
      "epoch": 1.3343488194973343,
      "grad_norm": 0.43163520097732544,
      "learning_rate": 5.952166568776062e-06,
      "loss": 0.3673,
      "step": 146
    },
    {
      "epoch": 1.3434881949733435,
      "grad_norm": 0.44529059529304504,
      "learning_rate": 5.806125868053433e-06,
      "loss": 0.3081,
      "step": 147
    },
    {
      "epoch": 1.3526275704493527,
      "grad_norm": 0.44915682077407837,
      "learning_rate": 5.66116260882442e-06,
      "loss": 0.3201,
      "step": 148
    },
    {
      "epoch": 1.3617669459253618,
      "grad_norm": 0.45789527893066406,
      "learning_rate": 5.517314033368031e-06,
      "loss": 0.3533,
      "step": 149
    },
    {
      "epoch": 1.370906321401371,
      "grad_norm": 0.4622362554073334,
      "learning_rate": 5.37461709759165e-06,
      "loss": 0.355,
      "step": 150
    },
    {
      "epoch": 1.38004569687738,
      "grad_norm": 0.45579296350479126,
      "learning_rate": 5.233108461536749e-06,
      "loss": 0.3515,
      "step": 151
    },
    {
      "epoch": 1.3891850723533892,
      "grad_norm": 0.42939919233322144,
      "learning_rate": 5.092824479960625e-06,
      "loss": 0.3592,
      "step": 152
    },
    {
      "epoch": 1.3983244478293984,
      "grad_norm": 0.48685169219970703,
      "learning_rate": 4.9538011929965436e-06,
      "loss": 0.373,
      "step": 153
    },
    {
      "epoch": 1.4074638233054075,
      "grad_norm": 0.4406936466693878,
      "learning_rate": 4.81607431689475e-06,
      "loss": 0.3346,
      "step": 154
    },
    {
      "epoch": 1.4166031987814165,
      "grad_norm": 0.46851012110710144,
      "learning_rate": 4.679679234846636e-06,
      "loss": 0.3602,
      "step": 155
    },
    {
      "epoch": 1.4257425742574257,
      "grad_norm": 0.40592843294143677,
      "learning_rate": 4.544650987894514e-06,
      "loss": 0.3088,
      "step": 156
    },
    {
      "epoch": 1.4348819497334349,
      "grad_norm": 0.4123000204563141,
      "learning_rate": 4.411024265929283e-06,
      "loss": 0.3343,
      "step": 157
    },
    {
      "epoch": 1.444021325209444,
      "grad_norm": 0.395128071308136,
      "learning_rate": 4.278833398778306e-06,
      "loss": 0.2924,
      "step": 158
    },
    {
      "epoch": 1.4531607006854532,
      "grad_norm": 0.4521404206752777,
      "learning_rate": 4.148112347385762e-06,
      "loss": 0.3816,
      "step": 159
    },
    {
      "epoch": 1.4623000761614624,
      "grad_norm": 0.41636931896209717,
      "learning_rate": 4.01889469508784e-06,
      "loss": 0.3352,
      "step": 160
    },
    {
      "epoch": 1.4714394516374714,
      "grad_norm": 0.43509963154792786,
      "learning_rate": 3.891213638984858e-06,
      "loss": 0.3559,
      "step": 161
    },
    {
      "epoch": 1.4805788271134805,
      "grad_norm": 0.37414830923080444,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.3027,
      "step": 162
    },
    {
      "epoch": 1.4897182025894897,
      "grad_norm": 0.44813913106918335,
      "learning_rate": 3.6405921215154492e-06,
      "loss": 0.3676,
      "step": 163
    },
    {
      "epoch": 1.498857578065499,
      "grad_norm": 0.4593113958835602,
      "learning_rate": 3.5177160469221184e-06,
      "loss": 0.3432,
      "step": 164
    },
    {
      "epoch": 1.5079969535415079,
      "grad_norm": 0.3725077211856842,
      "learning_rate": 3.3965053255284085e-06,
      "loss": 0.2981,
      "step": 165
    },
    {
      "epoch": 1.517136329017517,
      "grad_norm": 0.4420298933982849,
      "learning_rate": 3.2769910973868314e-06,
      "loss": 0.3699,
      "step": 166
    },
    {
      "epoch": 1.5262757044935262,
      "grad_norm": 0.3754097819328308,
      "learning_rate": 3.1592040667065393e-06,
      "loss": 0.3133,
      "step": 167
    },
    {
      "epoch": 1.5354150799695354,
      "grad_norm": 0.44490984082221985,
      "learning_rate": 3.0431744939651365e-06,
      "loss": 0.3577,
      "step": 168
    },
    {
      "epoch": 1.5445544554455446,
      "grad_norm": 0.3956274688243866,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.3087,
      "step": 169
    },
    {
      "epoch": 1.5536938309215538,
      "grad_norm": 0.44501689076423645,
      "learning_rate": 2.8165064990227255e-06,
      "loss": 0.3393,
      "step": 170
    },
    {
      "epoch": 1.562833206397563,
      "grad_norm": 0.45090392231941223,
      "learning_rate": 2.7059263097336595e-06,
      "loss": 0.3807,
      "step": 171
    },
    {
      "epoch": 1.5719725818735721,
      "grad_norm": 0.42932793498039246,
      "learning_rate": 2.597220029246846e-06,
      "loss": 0.369,
      "step": 172
    },
    {
      "epoch": 1.581111957349581,
      "grad_norm": 0.3550121784210205,
      "learning_rate": 2.490415585118887e-06,
      "loss": 0.29,
      "step": 173
    },
    {
      "epoch": 1.5902513328255903,
      "grad_norm": 0.4236467182636261,
      "learning_rate": 2.3855404163086558e-06,
      "loss": 0.3518,
      "step": 174
    },
    {
      "epoch": 1.5993907083015992,
      "grad_norm": 0.37905916571617126,
      "learning_rate": 2.282621466127982e-06,
      "loss": 0.297,
      "step": 175
    },
    {
      "epoch": 1.6085300837776084,
      "grad_norm": 0.3965452313423157,
      "learning_rate": 2.1816851753197023e-06,
      "loss": 0.3166,
      "step": 176
    },
    {
      "epoch": 1.6176694592536176,
      "grad_norm": 0.43022918701171875,
      "learning_rate": 2.082757475264804e-06,
      "loss": 0.3552,
      "step": 177
    },
    {
      "epoch": 1.6268088347296268,
      "grad_norm": 0.40345683693885803,
      "learning_rate": 1.9858637813204352e-06,
      "loss": 0.2897,
      "step": 178
    },
    {
      "epoch": 1.635948210205636,
      "grad_norm": 0.4786721169948578,
      "learning_rate": 1.8910289862904917e-06,
      "loss": 0.414,
      "step": 179
    },
    {
      "epoch": 1.6450875856816451,
      "grad_norm": 0.3699612319469452,
      "learning_rate": 1.7982774540304404e-06,
      "loss": 0.288,
      "step": 180
    },
    {
      "epoch": 1.6542269611576543,
      "grad_norm": 0.41491949558258057,
      "learning_rate": 1.7076330131880525e-06,
      "loss": 0.3297,
      "step": 181
    },
    {
      "epoch": 1.6633663366336635,
      "grad_norm": 0.40379223227500916,
      "learning_rate": 1.6191189510815942e-06,
      "loss": 0.3822,
      "step": 182
    },
    {
      "epoch": 1.6725057121096725,
      "grad_norm": 0.34889909625053406,
      "learning_rate": 1.5327580077171589e-06,
      "loss": 0.305,
      "step": 183
    },
    {
      "epoch": 1.6816450875856817,
      "grad_norm": 0.3765786588191986,
      "learning_rate": 1.4485723699465392e-06,
      "loss": 0.3624,
      "step": 184
    },
    {
      "epoch": 1.6907844630616908,
      "grad_norm": 0.36284613609313965,
      "learning_rate": 1.3665836657672493e-06,
      "loss": 0.3178,
      "step": 185
    },
    {
      "epoch": 1.6999238385376998,
      "grad_norm": 0.3935321867465973,
      "learning_rate": 1.286812958766106e-06,
      "loss": 0.3743,
      "step": 186
    },
    {
      "epoch": 1.709063214013709,
      "grad_norm": 0.3890056312084198,
      "learning_rate": 1.209280742707828e-06,
      "loss": 0.3397,
      "step": 187
    },
    {
      "epoch": 1.7182025894897182,
      "grad_norm": 0.40561848878860474,
      "learning_rate": 1.134006936269999e-06,
      "loss": 0.3465,
      "step": 188
    },
    {
      "epoch": 1.7273419649657273,
      "grad_norm": 0.43405306339263916,
      "learning_rate": 1.0610108779258043e-06,
      "loss": 0.3369,
      "step": 189
    },
    {
      "epoch": 1.7364813404417365,
      "grad_norm": 0.418133407831192,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.3499,
      "step": 190
    },
    {
      "epoch": 1.7456207159177457,
      "grad_norm": 0.36436179280281067,
      "learning_rate": 9.2192642873008e-07,
      "loss": 0.3311,
      "step": 191
    },
    {
      "epoch": 1.7547600913937549,
      "grad_norm": 0.3649395704269409,
      "learning_rate": 8.558737698418762e-07,
      "loss": 0.3279,
      "step": 192
    },
    {
      "epoch": 1.763899466869764,
      "grad_norm": 0.3702605962753296,
      "learning_rate": 7.921703137941172e-07,
      "loss": 0.335,
      "step": 193
    },
    {
      "epoch": 1.773038842345773,
      "grad_norm": 0.3535550534725189,
      "learning_rate": 7.308324265397837e-07,
      "loss": 0.3227,
      "step": 194
    },
    {
      "epoch": 1.7821782178217822,
      "grad_norm": 0.3407094478607178,
      "learning_rate": 6.718758662973524e-07,
      "loss": 0.325,
      "step": 195
    },
    {
      "epoch": 1.7913175932977912,
      "grad_norm": 0.3515715003013611,
      "learning_rate": 6.153157795023956e-07,
      "loss": 0.3623,
      "step": 196
    },
    {
      "epoch": 1.8004569687738003,
      "grad_norm": 0.3598881959915161,
      "learning_rate": 5.611666969163243e-07,
      "loss": 0.3614,
      "step": 197
    },
    {
      "epoch": 1.8095963442498095,
      "grad_norm": 0.34573909640312195,
      "learning_rate": 5.094425298933136e-07,
      "loss": 0.341,
      "step": 198
    },
    {
      "epoch": 1.8187357197258187,
      "grad_norm": 0.3632952868938446,
      "learning_rate": 4.6015656680636234e-07,
      "loss": 0.3511,
      "step": 199
    },
    {
      "epoch": 1.827875095201828,
      "grad_norm": 0.35777807235717773,
      "learning_rate": 4.133214696333943e-07,
      "loss": 0.343,
      "step": 200
    },
    {
      "epoch": 1.837014470677837,
      "grad_norm": 0.3659260869026184,
      "learning_rate": 3.6894927070429744e-07,
      "loss": 0.324,
      "step": 201
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.37839674949645996,
      "learning_rate": 3.2705136960970554e-07,
      "loss": 0.3629,
      "step": 202
    },
    {
      "epoch": 1.8552932216298554,
      "grad_norm": 0.36181095242500305,
      "learning_rate": 2.8763853027236277e-07,
      "loss": 0.3303,
      "step": 203
    },
    {
      "epoch": 1.8644325971058644,
      "grad_norm": 0.36267733573913574,
      "learning_rate": 2.507208781817638e-07,
      "loss": 0.3305,
      "step": 204
    },
    {
      "epoch": 1.8735719725818736,
      "grad_norm": 0.37985163927078247,
      "learning_rate": 2.1630789779284677e-07,
      "loss": 0.3638,
      "step": 205
    },
    {
      "epoch": 1.8827113480578828,
      "grad_norm": 0.3690941333770752,
      "learning_rate": 1.844084300893456e-07,
      "loss": 0.297,
      "step": 206
    },
    {
      "epoch": 1.8918507235338917,
      "grad_norm": 0.3706071674823761,
      "learning_rate": 1.55030670312476e-07,
      "loss": 0.3285,
      "step": 207
    },
    {
      "epoch": 1.900990099009901,
      "grad_norm": 0.38501277565956116,
      "learning_rate": 1.2818216585549824e-07,
      "loss": 0.3927,
      "step": 208
    },
    {
      "epoch": 1.91012947448591,
      "grad_norm": 0.35342392325401306,
      "learning_rate": 1.0386981432474075e-07,
      "loss": 0.3389,
      "step": 209
    },
    {
      "epoch": 1.9192688499619193,
      "grad_norm": 0.34960195422172546,
      "learning_rate": 8.209986176753947e-08,
      "loss": 0.3235,
      "step": 210
    },
    {
      "epoch": 1.9284082254379284,
      "grad_norm": 0.37650802731513977,
      "learning_rate": 6.287790106757396e-08,
      "loss": 0.3658,
      "step": 211
    },
    {
      "epoch": 1.9375476009139376,
      "grad_norm": 0.34741005301475525,
      "learning_rate": 4.6208870508017703e-08,
      "loss": 0.3335,
      "step": 212
    },
    {
      "epoch": 1.9466869763899468,
      "grad_norm": 0.3389255702495575,
      "learning_rate": 3.2097052502843005e-08,
      "loss": 0.304,
      "step": 213
    },
    {
      "epoch": 1.955826351865956,
      "grad_norm": 0.3613230586051941,
      "learning_rate": 2.054607249663665e-08,
      "loss": 0.3668,
      "step": 214
    },
    {
      "epoch": 1.964965727341965,
      "grad_norm": 0.36797016859054565,
      "learning_rate": 1.1558898033191545e-08,
      "loss": 0.3406,
      "step": 215
    },
    {
      "epoch": 1.9741051028179741,
      "grad_norm": 0.3596664071083069,
      "learning_rate": 5.137837993121064e-09,
      "loss": 0.3294,
      "step": 216
    },
    {
      "epoch": 1.983244478293983,
      "grad_norm": 0.3750191628932953,
      "learning_rate": 1.2845420006879494e-09,
      "loss": 0.3285,
      "step": 217
    },
    {
      "epoch": 1.9923838537699923,
      "grad_norm": 0.41900479793548584,
      "learning_rate": 0.0,
      "loss": 0.3674,
      "step": 218
    },
    {
      "epoch": 1.9923838537699923,
      "step": 218,
      "total_flos": 165914164723712.0,
      "train_loss": 0.4081570637335471,
      "train_runtime": 15049.1519,
      "train_samples_per_second": 1.395,
      "train_steps_per_second": 0.014
    }
  ],
  "logging_steps": 1,
  "max_steps": 218,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 165914164723712.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}