{
  "best_metric": 1.0612136125564575,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.007591285204585136,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 7.591285204585137e-05,
      "grad_norm": 4.00889778137207,
      "learning_rate": 1.004e-05,
      "loss": 3.3766,
      "step": 1
    },
    {
      "epoch": 7.591285204585137e-05,
      "eval_loss": 3.8744077682495117,
      "eval_runtime": 240.3929,
      "eval_samples_per_second": 23.075,
      "eval_steps_per_second": 5.77,
      "step": 1
    },
    {
      "epoch": 0.00015182570409170274,
      "grad_norm": 4.259544849395752,
      "learning_rate": 2.008e-05,
      "loss": 3.129,
      "step": 2
    },
    {
      "epoch": 0.0002277385561375541,
      "grad_norm": 4.009559631347656,
      "learning_rate": 3.012e-05,
      "loss": 3.5625,
      "step": 3
    },
    {
      "epoch": 0.0003036514081834055,
      "grad_norm": 3.93902587890625,
      "learning_rate": 4.016e-05,
      "loss": 3.2738,
      "step": 4
    },
    {
      "epoch": 0.0003795642602292568,
      "grad_norm": 4.684957981109619,
      "learning_rate": 5.02e-05,
      "loss": 3.4879,
      "step": 5
    },
    {
      "epoch": 0.0004554771122751082,
      "grad_norm": 4.2233781814575195,
      "learning_rate": 6.024e-05,
      "loss": 3.6261,
      "step": 6
    },
    {
      "epoch": 0.0005313899643209595,
      "grad_norm": 4.0557332038879395,
      "learning_rate": 7.028e-05,
      "loss": 3.4864,
      "step": 7
    },
    {
      "epoch": 0.000607302816366811,
      "grad_norm": 4.355313777923584,
      "learning_rate": 8.032e-05,
      "loss": 2.8283,
      "step": 8
    },
    {
      "epoch": 0.0006832156684126623,
      "grad_norm": 4.009000301361084,
      "learning_rate": 9.036000000000001e-05,
      "loss": 3.1648,
      "step": 9
    },
    {
      "epoch": 0.0007591285204585136,
      "grad_norm": 3.8348472118377686,
      "learning_rate": 0.0001004,
      "loss": 2.9517,
      "step": 10
    },
    {
      "epoch": 0.0008350413725043649,
      "grad_norm": 7.264204978942871,
      "learning_rate": 9.987157894736842e-05,
      "loss": 2.7876,
      "step": 11
    },
    {
      "epoch": 0.0009109542245502164,
      "grad_norm": 8.179417610168457,
      "learning_rate": 9.934315789473684e-05,
      "loss": 1.9061,
      "step": 12
    },
    {
      "epoch": 0.0009868670765960678,
      "grad_norm": 6.091889381408691,
      "learning_rate": 9.881473684210525e-05,
      "loss": 1.6486,
      "step": 13
    },
    {
      "epoch": 0.001062779928641919,
      "grad_norm": 5.305070877075195,
      "learning_rate": 9.828631578947369e-05,
      "loss": 2.1729,
      "step": 14
    },
    {
      "epoch": 0.0011386927806877705,
      "grad_norm": 6.4168548583984375,
      "learning_rate": 9.77578947368421e-05,
      "loss": 1.7497,
      "step": 15
    },
    {
      "epoch": 0.001214605632733622,
      "grad_norm": 5.889248847961426,
      "learning_rate": 9.722947368421052e-05,
      "loss": 1.6712,
      "step": 16
    },
    {
      "epoch": 0.0012905184847794731,
      "grad_norm": 5.305861949920654,
      "learning_rate": 9.670105263157895e-05,
      "loss": 1.71,
      "step": 17
    },
    {
      "epoch": 0.0013664313368253246,
      "grad_norm": 5.730749607086182,
      "learning_rate": 9.617263157894737e-05,
      "loss": 1.5306,
      "step": 18
    },
    {
      "epoch": 0.001442344188871176,
      "grad_norm": 8.906538963317871,
      "learning_rate": 9.564421052631579e-05,
      "loss": 2.0866,
      "step": 19
    },
    {
      "epoch": 0.0015182570409170272,
      "grad_norm": 6.357343673706055,
      "learning_rate": 9.511578947368421e-05,
      "loss": 1.9055,
      "step": 20
    },
    {
      "epoch": 0.0015941698929628787,
      "grad_norm": 4.676600456237793,
      "learning_rate": 9.458736842105264e-05,
      "loss": 1.3255,
      "step": 21
    },
    {
      "epoch": 0.0016700827450087299,
      "grad_norm": 5.27098274230957,
      "learning_rate": 9.405894736842106e-05,
      "loss": 1.4943,
      "step": 22
    },
    {
      "epoch": 0.0017459955970545813,
      "grad_norm": 4.284830093383789,
      "learning_rate": 9.353052631578947e-05,
      "loss": 1.2089,
      "step": 23
    },
    {
      "epoch": 0.0018219084491004327,
      "grad_norm": 3.9922897815704346,
      "learning_rate": 9.300210526315789e-05,
      "loss": 1.2562,
      "step": 24
    },
    {
      "epoch": 0.001897821301146284,
      "grad_norm": 4.029862880706787,
      "learning_rate": 9.247368421052631e-05,
      "loss": 1.504,
      "step": 25
    },
    {
      "epoch": 0.0019737341531921356,
      "grad_norm": 3.5687670707702637,
      "learning_rate": 9.194526315789473e-05,
      "loss": 1.4237,
      "step": 26
    },
    {
      "epoch": 0.0020496470052379866,
      "grad_norm": 3.2173025608062744,
      "learning_rate": 9.141684210526316e-05,
      "loss": 1.0595,
      "step": 27
    },
    {
      "epoch": 0.002125559857283838,
      "grad_norm": 3.408003568649292,
      "learning_rate": 9.088842105263158e-05,
      "loss": 0.7801,
      "step": 28
    },
    {
      "epoch": 0.0022014727093296895,
      "grad_norm": 3.468625068664551,
      "learning_rate": 9.036000000000001e-05,
      "loss": 1.2455,
      "step": 29
    },
    {
      "epoch": 0.002277385561375541,
      "grad_norm": 4.7892279624938965,
      "learning_rate": 8.983157894736843e-05,
      "loss": 1.3866,
      "step": 30
    },
    {
      "epoch": 0.0023532984134213924,
      "grad_norm": 3.3346621990203857,
      "learning_rate": 8.930315789473684e-05,
      "loss": 1.2134,
      "step": 31
    },
    {
      "epoch": 0.002429211265467244,
      "grad_norm": 3.797645092010498,
      "learning_rate": 8.877473684210526e-05,
      "loss": 1.3581,
      "step": 32
    },
    {
      "epoch": 0.002505124117513095,
      "grad_norm": 3.729444980621338,
      "learning_rate": 8.824631578947368e-05,
      "loss": 1.1622,
      "step": 33
    },
    {
      "epoch": 0.0025810369695589462,
      "grad_norm": 3.769681215286255,
      "learning_rate": 8.771789473684211e-05,
      "loss": 0.9735,
      "step": 34
    },
    {
      "epoch": 0.0026569498216047977,
      "grad_norm": 4.146418571472168,
      "learning_rate": 8.718947368421053e-05,
      "loss": 0.8318,
      "step": 35
    },
    {
      "epoch": 0.002732862673650649,
      "grad_norm": 3.002995491027832,
      "learning_rate": 8.666105263157895e-05,
      "loss": 0.7628,
      "step": 36
    },
    {
      "epoch": 0.0028087755256965006,
      "grad_norm": 3.8767268657684326,
      "learning_rate": 8.613263157894737e-05,
      "loss": 1.1052,
      "step": 37
    },
    {
      "epoch": 0.002884688377742352,
      "grad_norm": 5.0641069412231445,
      "learning_rate": 8.560421052631578e-05,
      "loss": 0.932,
      "step": 38
    },
    {
      "epoch": 0.002960601229788203,
      "grad_norm": 3.3671984672546387,
      "learning_rate": 8.50757894736842e-05,
      "loss": 0.9676,
      "step": 39
    },
    {
      "epoch": 0.0030365140818340544,
      "grad_norm": 4.778345584869385,
      "learning_rate": 8.454736842105263e-05,
      "loss": 0.9301,
      "step": 40
    },
    {
      "epoch": 0.003112426933879906,
      "grad_norm": 4.1439595222473145,
      "learning_rate": 8.401894736842106e-05,
      "loss": 0.8163,
      "step": 41
    },
    {
      "epoch": 0.0031883397859257573,
      "grad_norm": 2.6622302532196045,
      "learning_rate": 8.349052631578948e-05,
      "loss": 0.8257,
      "step": 42
    },
    {
      "epoch": 0.0032642526379716087,
      "grad_norm": 3.152009963989258,
      "learning_rate": 8.29621052631579e-05,
      "loss": 0.8255,
      "step": 43
    },
    {
      "epoch": 0.0033401654900174597,
      "grad_norm": 2.4167747497558594,
      "learning_rate": 8.243368421052632e-05,
      "loss": 0.7538,
      "step": 44
    },
    {
      "epoch": 0.003416078342063311,
      "grad_norm": 3.019895553588867,
      "learning_rate": 8.190526315789474e-05,
      "loss": 0.952,
      "step": 45
    },
    {
      "epoch": 0.0034919911941091626,
      "grad_norm": 3.5709264278411865,
      "learning_rate": 8.137684210526315e-05,
      "loss": 0.7851,
      "step": 46
    },
    {
      "epoch": 0.003567904046155014,
      "grad_norm": 3.9194111824035645,
      "learning_rate": 8.084842105263157e-05,
      "loss": 0.8724,
      "step": 47
    },
    {
      "epoch": 0.0036438168982008655,
      "grad_norm": 4.6772894859313965,
      "learning_rate": 8.032e-05,
      "loss": 0.8268,
      "step": 48
    },
    {
      "epoch": 0.003719729750246717,
      "grad_norm": 3.5549869537353516,
      "learning_rate": 7.979157894736842e-05,
      "loss": 0.7529,
      "step": 49
    },
    {
      "epoch": 0.003795642602292568,
      "grad_norm": 2.6311495304107666,
      "learning_rate": 7.926315789473684e-05,
      "loss": 0.7944,
      "step": 50
    },
    {
      "epoch": 0.003795642602292568,
      "eval_loss": 1.2532055377960205,
      "eval_runtime": 242.1049,
      "eval_samples_per_second": 22.912,
      "eval_steps_per_second": 5.729,
      "step": 50
    },
    {
      "epoch": 0.0038715554543384194,
      "grad_norm": 3.332703113555908,
      "learning_rate": 7.873473684210526e-05,
      "loss": 1.4012,
      "step": 51
    },
    {
      "epoch": 0.003947468306384271,
      "grad_norm": 3.9371657371520996,
      "learning_rate": 7.820631578947369e-05,
      "loss": 2.001,
      "step": 52
    },
    {
      "epoch": 0.004023381158430122,
      "grad_norm": 2.853745460510254,
      "learning_rate": 7.76778947368421e-05,
      "loss": 1.3783,
      "step": 53
    },
    {
      "epoch": 0.004099294010475973,
      "grad_norm": 3.1998298168182373,
      "learning_rate": 7.714947368421052e-05,
      "loss": 1.8504,
      "step": 54
    },
    {
      "epoch": 0.004175206862521825,
      "grad_norm": 3.126289129257202,
      "learning_rate": 7.662105263157896e-05,
      "loss": 1.862,
      "step": 55
    },
    {
      "epoch": 0.004251119714567676,
      "grad_norm": 2.4768309593200684,
      "learning_rate": 7.609263157894737e-05,
      "loss": 1.3451,
      "step": 56
    },
    {
      "epoch": 0.0043270325666135276,
      "grad_norm": 2.919741630554199,
      "learning_rate": 7.556421052631579e-05,
      "loss": 1.4162,
      "step": 57
    },
    {
      "epoch": 0.004402945418659379,
      "grad_norm": 2.9070191383361816,
      "learning_rate": 7.503578947368421e-05,
      "loss": 1.3421,
      "step": 58
    },
    {
      "epoch": 0.00447885827070523,
      "grad_norm": 3.0319571495056152,
      "learning_rate": 7.450736842105263e-05,
      "loss": 1.7147,
      "step": 59
    },
    {
      "epoch": 0.004554771122751082,
      "grad_norm": 3.456721544265747,
      "learning_rate": 7.397894736842105e-05,
      "loss": 1.2985,
      "step": 60
    },
    {
      "epoch": 0.004630683974796933,
      "grad_norm": 3.245173215866089,
      "learning_rate": 7.345052631578948e-05,
      "loss": 1.7002,
      "step": 61
    },
    {
      "epoch": 0.004706596826842785,
      "grad_norm": 2.7266035079956055,
      "learning_rate": 7.29221052631579e-05,
      "loss": 1.527,
      "step": 62
    },
    {
      "epoch": 0.004782509678888636,
      "grad_norm": 3.55993914604187,
      "learning_rate": 7.239368421052631e-05,
      "loss": 1.9147,
      "step": 63
    },
    {
      "epoch": 0.004858422530934488,
      "grad_norm": 2.028764009475708,
      "learning_rate": 7.186526315789474e-05,
      "loss": 1.0744,
      "step": 64
    },
    {
      "epoch": 0.004934335382980338,
      "grad_norm": 3.0651049613952637,
      "learning_rate": 7.133684210526316e-05,
      "loss": 1.1072,
      "step": 65
    },
    {
      "epoch": 0.00501024823502619,
      "grad_norm": 2.8773818016052246,
      "learning_rate": 7.080842105263158e-05,
      "loss": 1.3428,
      "step": 66
    },
    {
      "epoch": 0.005086161087072041,
      "grad_norm": 2.439342737197876,
      "learning_rate": 7.028e-05,
      "loss": 0.9611,
      "step": 67
    },
    {
      "epoch": 0.0051620739391178925,
      "grad_norm": 3.0564000606536865,
      "learning_rate": 6.975157894736843e-05,
      "loss": 1.1857,
      "step": 68
    },
    {
      "epoch": 0.005237986791163744,
      "grad_norm": 5.345979690551758,
      "learning_rate": 6.922315789473685e-05,
      "loss": 1.4022,
      "step": 69
    },
    {
      "epoch": 0.005313899643209595,
      "grad_norm": 2.436023712158203,
      "learning_rate": 6.869473684210527e-05,
      "loss": 0.9688,
      "step": 70
    },
    {
      "epoch": 0.005389812495255447,
      "grad_norm": 3.2191972732543945,
      "learning_rate": 6.816631578947368e-05,
      "loss": 1.2126,
      "step": 71
    },
    {
      "epoch": 0.005465725347301298,
      "grad_norm": 1.942346215248108,
      "learning_rate": 6.76378947368421e-05,
      "loss": 0.8721,
      "step": 72
    },
    {
      "epoch": 0.00554163819934715,
      "grad_norm": 4.746232509613037,
      "learning_rate": 6.710947368421052e-05,
      "loss": 1.392,
      "step": 73
    },
    {
      "epoch": 0.005617551051393001,
      "grad_norm": 2.850257158279419,
      "learning_rate": 6.658105263157894e-05,
      "loss": 1.1243,
      "step": 74
    },
    {
      "epoch": 0.0056934639034388525,
      "grad_norm": 2.983518362045288,
      "learning_rate": 6.605263157894737e-05,
      "loss": 1.1275,
      "step": 75
    },
    {
      "epoch": 0.005769376755484704,
      "grad_norm": 2.9279556274414062,
      "learning_rate": 6.55242105263158e-05,
      "loss": 1.5262,
      "step": 76
    },
    {
      "epoch": 0.0058452896075305546,
      "grad_norm": 6.875275135040283,
      "learning_rate": 6.499578947368422e-05,
      "loss": 0.8056,
      "step": 77
    },
    {
      "epoch": 0.005921202459576406,
      "grad_norm": 2.7808749675750732,
      "learning_rate": 6.446736842105264e-05,
      "loss": 1.097,
      "step": 78
    },
    {
      "epoch": 0.005997115311622257,
      "grad_norm": 6.791175842285156,
      "learning_rate": 6.393894736842105e-05,
      "loss": 0.7137,
      "step": 79
    },
    {
      "epoch": 0.006073028163668109,
      "grad_norm": 3.0166051387786865,
      "learning_rate": 6.341052631578947e-05,
      "loss": 1.3371,
      "step": 80
    },
    {
      "epoch": 0.00614894101571396,
      "grad_norm": 5.060026168823242,
      "learning_rate": 6.288210526315789e-05,
      "loss": 0.8866,
      "step": 81
    },
    {
      "epoch": 0.006224853867759812,
      "grad_norm": 4.755941390991211,
      "learning_rate": 6.235368421052632e-05,
      "loss": 1.1797,
      "step": 82
    },
    {
      "epoch": 0.006300766719805663,
      "grad_norm": 2.5491960048675537,
      "learning_rate": 6.182526315789474e-05,
      "loss": 0.7077,
      "step": 83
    },
    {
      "epoch": 0.006376679571851515,
      "grad_norm": 2.4443137645721436,
      "learning_rate": 6.129684210526316e-05,
      "loss": 0.8194,
      "step": 84
    },
    {
      "epoch": 0.006452592423897366,
      "grad_norm": 2.4709208011627197,
      "learning_rate": 6.076842105263158e-05,
      "loss": 0.8501,
      "step": 85
    },
    {
      "epoch": 0.0065285052759432175,
      "grad_norm": 2.1877200603485107,
      "learning_rate": 6.024e-05,
      "loss": 0.768,
      "step": 86
    },
    {
      "epoch": 0.006604418127989069,
      "grad_norm": 3.9589478969573975,
      "learning_rate": 5.971157894736842e-05,
      "loss": 0.8284,
      "step": 87
    },
    {
      "epoch": 0.0066803309800349195,
      "grad_norm": 2.8392932415008545,
      "learning_rate": 5.9183157894736835e-05,
      "loss": 0.7405,
      "step": 88
    },
    {
      "epoch": 0.006756243832080771,
      "grad_norm": 4.0319318771362305,
      "learning_rate": 5.8654736842105267e-05,
      "loss": 0.9588,
      "step": 89
    },
    {
      "epoch": 0.006832156684126622,
      "grad_norm": 4.04771614074707,
      "learning_rate": 5.8126315789473684e-05,
      "loss": 1.0062,
      "step": 90
    },
    {
      "epoch": 0.006908069536172474,
      "grad_norm": 2.8250598907470703,
      "learning_rate": 5.759789473684211e-05,
      "loss": 0.7596,
      "step": 91
    },
    {
      "epoch": 0.006983982388218325,
      "grad_norm": 2.2574899196624756,
      "learning_rate": 5.706947368421053e-05,
      "loss": 0.6135,
      "step": 92
    },
    {
      "epoch": 0.007059895240264177,
      "grad_norm": 2.3029751777648926,
      "learning_rate": 5.6541052631578945e-05,
      "loss": 0.9518,
      "step": 93
    },
    {
      "epoch": 0.007135808092310028,
      "grad_norm": 5.060135364532471,
      "learning_rate": 5.601263157894736e-05,
      "loss": 1.0503,
      "step": 94
    },
    {
      "epoch": 0.0072117209443558795,
      "grad_norm": 2.9369940757751465,
      "learning_rate": 5.5484210526315794e-05,
      "loss": 0.7239,
      "step": 95
    },
    {
      "epoch": 0.007287633796401731,
      "grad_norm": 1.8883877992630005,
      "learning_rate": 5.495578947368421e-05,
      "loss": 0.6678,
      "step": 96
    },
    {
      "epoch": 0.007363546648447582,
      "grad_norm": 2.149679660797119,
      "learning_rate": 5.442736842105264e-05,
      "loss": 0.7075,
      "step": 97
    },
    {
      "epoch": 0.007439459500493434,
      "grad_norm": 3.17543888092041,
      "learning_rate": 5.3898947368421055e-05,
      "loss": 0.8025,
      "step": 98
    },
    {
      "epoch": 0.007515372352539285,
      "grad_norm": 4.000500679016113,
      "learning_rate": 5.337052631578947e-05,
      "loss": 0.742,
      "step": 99
    },
    {
      "epoch": 0.007591285204585136,
      "grad_norm": 2.3594398498535156,
      "learning_rate": 5.284210526315789e-05,
      "loss": 0.6751,
      "step": 100
    },
    {
      "epoch": 0.007591285204585136,
      "eval_loss": 1.0612136125564575,
      "eval_runtime": 241.2405,
      "eval_samples_per_second": 22.994,
      "eval_steps_per_second": 5.749,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6803618267136000.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}