{
  "best_metric": 0.8045907020568848,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 1.6563146997929605,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008281573498964804,
      "grad_norm": 0.8388795256614685,
      "learning_rate": 7e-06,
      "loss": 0.804,
      "step": 1
    },
    {
      "epoch": 0.008281573498964804,
      "eval_loss": 1.1692047119140625,
      "eval_runtime": 32.2894,
      "eval_samples_per_second": 6.318,
      "eval_steps_per_second": 1.579,
      "step": 1
    },
    {
      "epoch": 0.016563146997929608,
      "grad_norm": 1.023823618888855,
      "learning_rate": 1.4e-05,
      "loss": 0.8544,
      "step": 2
    },
    {
      "epoch": 0.024844720496894408,
      "grad_norm": 1.0270742177963257,
      "learning_rate": 2.1e-05,
      "loss": 0.8755,
      "step": 3
    },
    {
      "epoch": 0.033126293995859216,
      "grad_norm": 0.9506105780601501,
      "learning_rate": 2.8e-05,
      "loss": 0.8441,
      "step": 4
    },
    {
      "epoch": 0.041407867494824016,
      "grad_norm": 0.7932412028312683,
      "learning_rate": 3.5e-05,
      "loss": 0.7846,
      "step": 5
    },
    {
      "epoch": 0.049689440993788817,
      "grad_norm": 0.7128869295120239,
      "learning_rate": 4.2e-05,
      "loss": 0.9147,
      "step": 6
    },
    {
      "epoch": 0.057971014492753624,
      "grad_norm": 0.4176234304904938,
      "learning_rate": 4.899999999999999e-05,
      "loss": 0.7784,
      "step": 7
    },
    {
      "epoch": 0.06625258799171843,
      "grad_norm": 0.6762263178825378,
      "learning_rate": 5.6e-05,
      "loss": 0.8248,
      "step": 8
    },
    {
      "epoch": 0.07453416149068323,
      "grad_norm": 0.8291305899620056,
      "learning_rate": 6.3e-05,
      "loss": 0.8014,
      "step": 9
    },
    {
      "epoch": 0.08281573498964803,
      "grad_norm": 0.8136356472969055,
      "learning_rate": 7e-05,
      "loss": 0.9253,
      "step": 10
    },
    {
      "epoch": 0.09109730848861283,
      "grad_norm": 0.4319281578063965,
      "learning_rate": 6.999521567473641e-05,
      "loss": 0.8555,
      "step": 11
    },
    {
      "epoch": 0.09937888198757763,
      "grad_norm": 0.36292925477027893,
      "learning_rate": 6.998086400693241e-05,
      "loss": 0.7425,
      "step": 12
    },
    {
      "epoch": 0.10766045548654245,
      "grad_norm": 0.3616969585418701,
      "learning_rate": 6.995694892019065e-05,
      "loss": 0.9021,
      "step": 13
    },
    {
      "epoch": 0.11594202898550725,
      "grad_norm": 0.3888731002807617,
      "learning_rate": 6.99234769526571e-05,
      "loss": 0.7912,
      "step": 14
    },
    {
      "epoch": 0.12422360248447205,
      "grad_norm": 0.3677065968513489,
      "learning_rate": 6.988045725523343e-05,
      "loss": 0.6922,
      "step": 15
    },
    {
      "epoch": 0.13250517598343686,
      "grad_norm": 0.38920730352401733,
      "learning_rate": 6.982790158907539e-05,
      "loss": 0.8843,
      "step": 16
    },
    {
      "epoch": 0.14078674948240166,
      "grad_norm": 0.4462592601776123,
      "learning_rate": 6.976582432237733e-05,
      "loss": 0.9304,
      "step": 17
    },
    {
      "epoch": 0.14906832298136646,
      "grad_norm": 0.40514039993286133,
      "learning_rate": 6.969424242644413e-05,
      "loss": 0.8461,
      "step": 18
    },
    {
      "epoch": 0.15734989648033126,
      "grad_norm": 0.44129592180252075,
      "learning_rate": 6.961317547105138e-05,
      "loss": 0.8518,
      "step": 19
    },
    {
      "epoch": 0.16563146997929606,
      "grad_norm": 0.4335060715675354,
      "learning_rate": 6.952264561909527e-05,
      "loss": 0.8793,
      "step": 20
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 0.543445885181427,
      "learning_rate": 6.942267762053337e-05,
      "loss": 0.9113,
      "step": 21
    },
    {
      "epoch": 0.18219461697722567,
      "grad_norm": 0.5542380809783936,
      "learning_rate": 6.931329880561832e-05,
      "loss": 0.9433,
      "step": 22
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 0.516521692276001,
      "learning_rate": 6.919453907742597e-05,
      "loss": 0.9016,
      "step": 23
    },
    {
      "epoch": 0.19875776397515527,
      "grad_norm": 0.5378891825675964,
      "learning_rate": 6.90664309036802e-05,
      "loss": 0.7877,
      "step": 24
    },
    {
      "epoch": 0.2070393374741201,
      "grad_norm": 0.577548086643219,
      "learning_rate": 6.892900930787656e-05,
      "loss": 0.8454,
      "step": 25
    },
    {
      "epoch": 0.2153209109730849,
      "grad_norm": 0.5783238410949707,
      "learning_rate": 6.87823118597072e-05,
      "loss": 0.8776,
      "step": 26
    },
    {
      "epoch": 0.2236024844720497,
      "grad_norm": 0.5544853210449219,
      "learning_rate": 6.862637866478969e-05,
      "loss": 0.8123,
      "step": 27
    },
    {
      "epoch": 0.2318840579710145,
      "grad_norm": 0.7183583974838257,
      "learning_rate": 6.846125235370252e-05,
      "loss": 0.941,
      "step": 28
    },
    {
      "epoch": 0.2401656314699793,
      "grad_norm": 0.7497084140777588,
      "learning_rate": 6.828697807033038e-05,
      "loss": 0.9533,
      "step": 29
    },
    {
      "epoch": 0.2484472049689441,
      "grad_norm": 1.0239611864089966,
      "learning_rate": 6.81036034595222e-05,
      "loss": 1.0226,
      "step": 30
    },
    {
      "epoch": 0.2567287784679089,
      "grad_norm": 0.215080127120018,
      "learning_rate": 6.791117865406564e-05,
      "loss": 0.5555,
      "step": 31
    },
    {
      "epoch": 0.2650103519668737,
      "grad_norm": 0.32441794872283936,
      "learning_rate": 6.770975626098112e-05,
      "loss": 0.7501,
      "step": 32
    },
    {
      "epoch": 0.2732919254658385,
      "grad_norm": 0.309184730052948,
      "learning_rate": 6.749939134713974e-05,
      "loss": 0.7853,
      "step": 33
    },
    {
      "epoch": 0.2815734989648033,
      "grad_norm": 0.3206530511379242,
      "learning_rate": 6.728014142420846e-05,
      "loss": 0.659,
      "step": 34
    },
    {
      "epoch": 0.2898550724637681,
      "grad_norm": 0.2530266046524048,
      "learning_rate": 6.7052066432927e-05,
      "loss": 0.651,
      "step": 35
    },
    {
      "epoch": 0.2981366459627329,
      "grad_norm": 0.27744585275650024,
      "learning_rate": 6.681522872672069e-05,
      "loss": 0.6502,
      "step": 36
    },
    {
      "epoch": 0.3064182194616977,
      "grad_norm": 0.2372923493385315,
      "learning_rate": 6.656969305465356e-05,
      "loss": 0.7619,
      "step": 37
    },
    {
      "epoch": 0.3146997929606625,
      "grad_norm": 0.2609351575374603,
      "learning_rate": 6.631552654372672e-05,
      "loss": 0.6668,
      "step": 38
    },
    {
      "epoch": 0.32298136645962733,
      "grad_norm": 0.2953057885169983,
      "learning_rate": 6.60527986805264e-05,
      "loss": 0.7653,
      "step": 39
    },
    {
      "epoch": 0.33126293995859213,
      "grad_norm": 0.32191261649131775,
      "learning_rate": 6.578158129222711e-05,
      "loss": 0.8575,
      "step": 40
    },
    {
      "epoch": 0.33954451345755693,
      "grad_norm": 0.2882176339626312,
      "learning_rate": 6.550194852695469e-05,
      "loss": 0.7252,
      "step": 41
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 0.27066636085510254,
      "learning_rate": 6.521397683351509e-05,
      "loss": 0.7575,
      "step": 42
    },
    {
      "epoch": 0.35610766045548653,
      "grad_norm": 0.3208519518375397,
      "learning_rate": 6.491774494049386e-05,
      "loss": 0.8635,
      "step": 43
    },
    {
      "epoch": 0.36438923395445133,
      "grad_norm": 0.35946550965309143,
      "learning_rate": 6.461333383473272e-05,
      "loss": 0.8081,
      "step": 44
    },
    {
      "epoch": 0.37267080745341613,
      "grad_norm": 0.3770899176597595,
      "learning_rate": 6.430082673918849e-05,
      "loss": 0.8199,
      "step": 45
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 0.36672505736351013,
      "learning_rate": 6.398030909018069e-05,
      "loss": 0.8569,
      "step": 46
    },
    {
      "epoch": 0.38923395445134573,
      "grad_norm": 0.3554442226886749,
      "learning_rate": 6.365186851403423e-05,
      "loss": 0.8098,
      "step": 47
    },
    {
      "epoch": 0.39751552795031053,
      "grad_norm": 0.3235284388065338,
      "learning_rate": 6.331559480312315e-05,
      "loss": 0.791,
      "step": 48
    },
    {
      "epoch": 0.4057971014492754,
      "grad_norm": 0.3624836206436157,
      "learning_rate": 6.297157989132236e-05,
      "loss": 0.7369,
      "step": 49
    },
    {
      "epoch": 0.4140786749482402,
      "grad_norm": 0.3535378873348236,
      "learning_rate": 6.261991782887377e-05,
      "loss": 0.6616,
      "step": 50
    },
    {
      "epoch": 0.4140786749482402,
      "eval_loss": 0.8644523024559021,
      "eval_runtime": 32.248,
      "eval_samples_per_second": 6.326,
      "eval_steps_per_second": 1.581,
      "step": 50
    },
    {
      "epoch": 0.422360248447205,
      "grad_norm": 0.3799665868282318,
      "learning_rate": 6.226070475667393e-05,
      "loss": 0.7939,
      "step": 51
    },
    {
      "epoch": 0.4306418219461698,
      "grad_norm": 0.4117141664028168,
      "learning_rate": 6.189403887999006e-05,
      "loss": 0.7855,
      "step": 52
    },
    {
      "epoch": 0.4389233954451346,
      "grad_norm": 0.4020320177078247,
      "learning_rate": 6.152002044161171e-05,
      "loss": 0.8022,
      "step": 53
    },
    {
      "epoch": 0.4472049689440994,
      "grad_norm": 0.46013712882995605,
      "learning_rate": 6.113875169444539e-05,
      "loss": 0.8572,
      "step": 54
    },
    {
      "epoch": 0.4554865424430642,
      "grad_norm": 0.45274391770362854,
      "learning_rate": 6.0750336873559605e-05,
      "loss": 0.7123,
      "step": 55
    },
    {
      "epoch": 0.463768115942029,
      "grad_norm": 0.5365428328514099,
      "learning_rate": 6.035488216768811e-05,
      "loss": 0.8758,
      "step": 56
    },
    {
      "epoch": 0.4720496894409938,
      "grad_norm": 0.641646146774292,
      "learning_rate": 5.9952495690198894e-05,
      "loss": 0.9564,
      "step": 57
    },
    {
      "epoch": 0.4803312629399586,
      "grad_norm": 0.508011519908905,
      "learning_rate": 5.954328744953709e-05,
      "loss": 0.8731,
      "step": 58
    },
    {
      "epoch": 0.4886128364389234,
      "grad_norm": 0.8867300152778625,
      "learning_rate": 5.91273693191498e-05,
      "loss": 0.9009,
      "step": 59
    },
    {
      "epoch": 0.4968944099378882,
      "grad_norm": 0.9791533946990967,
      "learning_rate": 5.870485500690094e-05,
      "loss": 1.1371,
      "step": 60
    },
    {
      "epoch": 0.505175983436853,
      "grad_norm": 0.24218077957630157,
      "learning_rate": 5.827586002398468e-05,
      "loss": 0.6875,
      "step": 61
    },
    {
      "epoch": 0.5134575569358178,
      "grad_norm": 0.22737136483192444,
      "learning_rate": 5.784050165334589e-05,
      "loss": 0.7116,
      "step": 62
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 0.2559671103954315,
      "learning_rate": 5.739889891761608e-05,
      "loss": 0.7719,
      "step": 63
    },
    {
      "epoch": 0.5300207039337475,
      "grad_norm": 0.24487318098545074,
      "learning_rate": 5.6951172546573794e-05,
      "loss": 0.7212,
      "step": 64
    },
    {
      "epoch": 0.5383022774327122,
      "grad_norm": 0.23421718180179596,
      "learning_rate": 5.6497444944138376e-05,
      "loss": 0.7471,
      "step": 65
    },
    {
      "epoch": 0.546583850931677,
      "grad_norm": 0.21821844577789307,
      "learning_rate": 5.603784015490587e-05,
      "loss": 0.6552,
      "step": 66
    },
    {
      "epoch": 0.5548654244306418,
      "grad_norm": 0.26210862398147583,
      "learning_rate": 5.557248383023655e-05,
      "loss": 0.774,
      "step": 67
    },
    {
      "epoch": 0.5631469979296067,
      "grad_norm": 0.2591906785964966,
      "learning_rate": 5.510150319390302e-05,
      "loss": 0.687,
      "step": 68
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.26978063583374023,
      "learning_rate": 5.4625027007308546e-05,
      "loss": 0.7278,
      "step": 69
    },
    {
      "epoch": 0.5797101449275363,
      "grad_norm": 0.24709314107894897,
      "learning_rate": 5.414318553428494e-05,
      "loss": 0.5976,
      "step": 70
    },
    {
      "epoch": 0.587991718426501,
      "grad_norm": 0.30029296875,
      "learning_rate": 5.3656110505479776e-05,
      "loss": 0.7129,
      "step": 71
    },
    {
      "epoch": 0.5962732919254659,
      "grad_norm": 0.32972994446754456,
      "learning_rate": 5.316393508234253e-05,
      "loss": 0.8332,
      "step": 72
    },
    {
      "epoch": 0.6045548654244306,
      "grad_norm": 0.3365096151828766,
      "learning_rate": 5.266679382071953e-05,
      "loss": 0.8121,
      "step": 73
    },
    {
      "epoch": 0.6128364389233955,
      "grad_norm": 0.31908273696899414,
      "learning_rate": 5.216482263406778e-05,
      "loss": 0.8543,
      "step": 74
    },
    {
      "epoch": 0.6211180124223602,
      "grad_norm": 0.3485449552536011,
      "learning_rate": 5.1658158756297576e-05,
      "loss": 0.8116,
      "step": 75
    },
    {
      "epoch": 0.629399585921325,
      "grad_norm": 0.33620256185531616,
      "learning_rate": 5.114694070425407e-05,
      "loss": 0.7493,
      "step": 76
    },
    {
      "epoch": 0.6376811594202898,
      "grad_norm": 0.35500848293304443,
      "learning_rate": 5.063130823984823e-05,
      "loss": 0.7026,
      "step": 77
    },
    {
      "epoch": 0.6459627329192547,
      "grad_norm": 0.4022809565067291,
      "learning_rate": 5.011140233184724e-05,
      "loss": 0.7928,
      "step": 78
    },
    {
      "epoch": 0.6542443064182195,
      "grad_norm": 0.41914957761764526,
      "learning_rate": 4.958736511733516e-05,
      "loss": 0.8219,
      "step": 79
    },
    {
      "epoch": 0.6625258799171843,
      "grad_norm": 0.4002333879470825,
      "learning_rate": 4.905933986285393e-05,
      "loss": 0.8019,
      "step": 80
    },
    {
      "epoch": 0.6708074534161491,
      "grad_norm": 0.46458199620246887,
      "learning_rate": 4.8527470925235824e-05,
      "loss": 0.7353,
      "step": 81
    },
    {
      "epoch": 0.6790890269151139,
      "grad_norm": 0.5049306154251099,
      "learning_rate": 4.799190371213772e-05,
      "loss": 0.935,
      "step": 82
    },
    {
      "epoch": 0.6873706004140787,
      "grad_norm": 0.48192158341407776,
      "learning_rate": 4.745278464228808e-05,
      "loss": 0.8568,
      "step": 83
    },
    {
      "epoch": 0.6956521739130435,
      "grad_norm": 0.5385396480560303,
      "learning_rate": 4.69102611054575e-05,
      "loss": 0.8613,
      "step": 84
    },
    {
      "epoch": 0.7039337474120083,
      "grad_norm": 0.4860864579677582,
      "learning_rate": 4.6364481422163926e-05,
      "loss": 0.8288,
      "step": 85
    },
    {
      "epoch": 0.7122153209109731,
      "grad_norm": 0.4691753387451172,
      "learning_rate": 4.581559480312316e-05,
      "loss": 0.7301,
      "step": 86
    },
    {
      "epoch": 0.7204968944099379,
      "grad_norm": 0.6713128685951233,
      "learning_rate": 4.526375130845627e-05,
      "loss": 1.0118,
      "step": 87
    },
    {
      "epoch": 0.7287784679089027,
      "grad_norm": 0.8337951302528381,
      "learning_rate": 4.4709101806664554e-05,
      "loss": 0.8735,
      "step": 88
    },
    {
      "epoch": 0.7370600414078675,
      "grad_norm": 0.7612811326980591,
      "learning_rate": 4.4151797933383685e-05,
      "loss": 0.9304,
      "step": 89
    },
    {
      "epoch": 0.7453416149068323,
      "grad_norm": 0.8643386960029602,
      "learning_rate": 4.359199204992797e-05,
      "loss": 1.0096,
      "step": 90
    },
    {
      "epoch": 0.7536231884057971,
      "grad_norm": 0.20685485005378723,
      "learning_rate": 4.30298372016363e-05,
      "loss": 0.6227,
      "step": 91
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 0.25332385301589966,
      "learning_rate": 4.246548707603114e-05,
      "loss": 0.7315,
      "step": 92
    },
    {
      "epoch": 0.7701863354037267,
      "grad_norm": 0.24437353014945984,
      "learning_rate": 4.1899095960801805e-05,
      "loss": 0.6921,
      "step": 93
    },
    {
      "epoch": 0.7784679089026915,
      "grad_norm": 0.2593900263309479,
      "learning_rate": 4.133081870162385e-05,
      "loss": 0.7514,
      "step": 94
    },
    {
      "epoch": 0.7867494824016563,
      "grad_norm": 0.2574782371520996,
      "learning_rate": 4.076081065982569e-05,
      "loss": 0.6447,
      "step": 95
    },
    {
      "epoch": 0.7950310559006211,
      "grad_norm": 0.3053584396839142,
      "learning_rate": 4.018922766991447e-05,
      "loss": 0.6437,
      "step": 96
    },
    {
      "epoch": 0.8033126293995859,
      "grad_norm": 0.2642808258533478,
      "learning_rate": 3.961622599697241e-05,
      "loss": 0.7539,
      "step": 97
    },
    {
      "epoch": 0.8115942028985508,
      "grad_norm": 0.2618858814239502,
      "learning_rate": 3.9041962293935516e-05,
      "loss": 0.6641,
      "step": 98
    },
    {
      "epoch": 0.8198757763975155,
      "grad_norm": 0.2602124512195587,
      "learning_rate": 3.84665935587662e-05,
      "loss": 0.6166,
      "step": 99
    },
    {
      "epoch": 0.8281573498964804,
      "grad_norm": 0.2887020707130432,
      "learning_rate": 3.7890277091531636e-05,
      "loss": 0.6502,
      "step": 100
    },
    {
      "epoch": 0.8281573498964804,
      "eval_loss": 0.8196816444396973,
      "eval_runtime": 32.2536,
      "eval_samples_per_second": 6.325,
      "eval_steps_per_second": 1.581,
      "step": 100
    },
    {
      "epoch": 0.8364389233954451,
      "grad_norm": 0.3054017126560211,
      "learning_rate": 3.7313170451399475e-05,
      "loss": 0.7171,
      "step": 101
    },
    {
      "epoch": 0.84472049689441,
      "grad_norm": 0.2971940040588379,
      "learning_rate": 3.673543141356278e-05,
      "loss": 0.7747,
      "step": 102
    },
    {
      "epoch": 0.8530020703933747,
      "grad_norm": 0.28216373920440674,
      "learning_rate": 3.6157217926105783e-05,
      "loss": 0.7735,
      "step": 103
    },
    {
      "epoch": 0.8612836438923396,
      "grad_norm": 0.3302093744277954,
      "learning_rate": 3.557868806682255e-05,
      "loss": 0.7433,
      "step": 104
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 0.39043137431144714,
      "learning_rate": 3.5e-05,
      "loss": 0.8474,
      "step": 105
    },
    {
      "epoch": 0.8778467908902692,
      "grad_norm": 0.3387686312198639,
      "learning_rate": 3.442131193317745e-05,
      "loss": 0.7744,
      "step": 106
    },
    {
      "epoch": 0.8861283643892339,
      "grad_norm": 0.3744807243347168,
      "learning_rate": 3.384278207389421e-05,
      "loss": 0.844,
      "step": 107
    },
    {
      "epoch": 0.8944099378881988,
      "grad_norm": 0.3628387749195099,
      "learning_rate": 3.3264568586437216e-05,
      "loss": 0.7452,
      "step": 108
    },
    {
      "epoch": 0.9026915113871635,
      "grad_norm": 0.34833672642707825,
      "learning_rate": 3.268682954860052e-05,
      "loss": 0.727,
      "step": 109
    },
    {
      "epoch": 0.9109730848861284,
      "grad_norm": 0.38752731680870056,
      "learning_rate": 3.210972290846837e-05,
      "loss": 0.7226,
      "step": 110
    },
    {
      "epoch": 0.9192546583850931,
      "grad_norm": 0.4460507035255432,
      "learning_rate": 3.15334064412338e-05,
      "loss": 0.783,
      "step": 111
    },
    {
      "epoch": 0.927536231884058,
      "grad_norm": 0.4155882000923157,
      "learning_rate": 3.0958037706064485e-05,
      "loss": 0.7733,
      "step": 112
    },
    {
      "epoch": 0.9358178053830227,
      "grad_norm": 0.42916351556777954,
      "learning_rate": 3.038377400302758e-05,
      "loss": 0.8132,
      "step": 113
    },
    {
      "epoch": 0.9440993788819876,
      "grad_norm": 0.40436169505119324,
      "learning_rate": 2.9810772330085524e-05,
      "loss": 0.7043,
      "step": 114
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.5273613333702087,
      "learning_rate": 2.9239189340174306e-05,
      "loss": 0.8491,
      "step": 115
    },
    {
      "epoch": 0.9606625258799172,
      "grad_norm": 0.5109385848045349,
      "learning_rate": 2.8669181298376163e-05,
      "loss": 0.7627,
      "step": 116
    },
    {
      "epoch": 0.968944099378882,
      "grad_norm": 0.6382851600646973,
      "learning_rate": 2.8100904039198193e-05,
      "loss": 0.7009,
      "step": 117
    },
    {
      "epoch": 0.9772256728778468,
      "grad_norm": 0.6231806874275208,
      "learning_rate": 2.7534512923968863e-05,
      "loss": 1.0499,
      "step": 118
    },
    {
      "epoch": 0.9855072463768116,
      "grad_norm": 0.8011189103126526,
      "learning_rate": 2.6970162798363695e-05,
      "loss": 0.9278,
      "step": 119
    },
    {
      "epoch": 0.9937888198757764,
      "grad_norm": 0.8758801221847534,
      "learning_rate": 2.640800795007203e-05,
      "loss": 1.0722,
      "step": 120
    },
    {
      "epoch": 1.0020703933747412,
      "grad_norm": 0.6602959632873535,
      "learning_rate": 2.5848202066616305e-05,
      "loss": 0.8362,
      "step": 121
    },
    {
      "epoch": 1.010351966873706,
      "grad_norm": 0.22043457627296448,
      "learning_rate": 2.5290898193335446e-05,
      "loss": 0.6068,
      "step": 122
    },
    {
      "epoch": 1.0186335403726707,
      "grad_norm": 0.2249661386013031,
      "learning_rate": 2.4736248691543736e-05,
      "loss": 0.6206,
      "step": 123
    },
    {
      "epoch": 1.0269151138716357,
      "grad_norm": 0.23550471663475037,
      "learning_rate": 2.4184405196876842e-05,
      "loss": 0.7048,
      "step": 124
    },
    {
      "epoch": 1.0351966873706004,
      "grad_norm": 0.2546531856060028,
      "learning_rate": 2.363551857783608e-05,
      "loss": 0.6356,
      "step": 125
    },
    {
      "epoch": 1.0434782608695652,
      "grad_norm": 0.23183418810367584,
      "learning_rate": 2.308973889454249e-05,
      "loss": 0.7009,
      "step": 126
    },
    {
      "epoch": 1.05175983436853,
      "grad_norm": 0.23346415162086487,
      "learning_rate": 2.2547215357711918e-05,
      "loss": 0.5939,
      "step": 127
    },
    {
      "epoch": 1.060041407867495,
      "grad_norm": 0.24210385978221893,
      "learning_rate": 2.2008096287862266e-05,
      "loss": 0.7173,
      "step": 128
    },
    {
      "epoch": 1.0683229813664596,
      "grad_norm": 0.22531944513320923,
      "learning_rate": 2.1472529074764177e-05,
      "loss": 0.5273,
      "step": 129
    },
    {
      "epoch": 1.0766045548654244,
      "grad_norm": 0.25816595554351807,
      "learning_rate": 2.0940660137146074e-05,
      "loss": 0.6477,
      "step": 130
    },
    {
      "epoch": 1.0848861283643891,
      "grad_norm": 0.2804325520992279,
      "learning_rate": 2.041263488266484e-05,
      "loss": 0.6571,
      "step": 131
    },
    {
      "epoch": 1.093167701863354,
      "grad_norm": 0.274699330329895,
      "learning_rate": 1.988859766815275e-05,
      "loss": 0.6704,
      "step": 132
    },
    {
      "epoch": 1.1014492753623188,
      "grad_norm": 0.26135748624801636,
      "learning_rate": 1.9368691760151773e-05,
      "loss": 0.6641,
      "step": 133
    },
    {
      "epoch": 1.1097308488612836,
      "grad_norm": 0.3417491912841797,
      "learning_rate": 1.885305929574593e-05,
      "loss": 0.8094,
      "step": 134
    },
    {
      "epoch": 1.1180124223602483,
      "grad_norm": 0.2966398298740387,
      "learning_rate": 1.8341841243702424e-05,
      "loss": 0.7448,
      "step": 135
    },
    {
      "epoch": 1.1262939958592133,
      "grad_norm": 0.29686564207077026,
      "learning_rate": 1.7835177365932225e-05,
      "loss": 0.6288,
      "step": 136
    },
    {
      "epoch": 1.134575569358178,
      "grad_norm": 0.332364946603775,
      "learning_rate": 1.7333206179280478e-05,
      "loss": 0.7955,
      "step": 137
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.3861692249774933,
      "learning_rate": 1.6836064917657478e-05,
      "loss": 0.6987,
      "step": 138
    },
    {
      "epoch": 1.1511387163561078,
      "grad_norm": 0.351142555475235,
      "learning_rate": 1.6343889494520224e-05,
      "loss": 0.72,
      "step": 139
    },
    {
      "epoch": 1.1594202898550725,
      "grad_norm": 0.3705615997314453,
      "learning_rate": 1.5856814465715064e-05,
      "loss": 0.5818,
      "step": 140
    },
    {
      "epoch": 1.1677018633540373,
      "grad_norm": 0.36091944575309753,
      "learning_rate": 1.5374972992691458e-05,
      "loss": 0.6673,
      "step": 141
    },
    {
      "epoch": 1.175983436853002,
      "grad_norm": 0.39567896723747253,
      "learning_rate": 1.4898496806096974e-05,
      "loss": 0.6597,
      "step": 142
    },
    {
      "epoch": 1.184265010351967,
      "grad_norm": 0.38994473218917847,
      "learning_rate": 1.4427516169763444e-05,
      "loss": 0.6453,
      "step": 143
    },
    {
      "epoch": 1.1925465838509317,
      "grad_norm": 0.4518220126628876,
      "learning_rate": 1.396215984509412e-05,
      "loss": 0.7173,
      "step": 144
    },
    {
      "epoch": 1.2008281573498965,
      "grad_norm": 0.4618150591850281,
      "learning_rate": 1.3502555055861625e-05,
      "loss": 0.6811,
      "step": 145
    },
    {
      "epoch": 1.2091097308488612,
      "grad_norm": 0.4686547517776489,
      "learning_rate": 1.3048827453426203e-05,
      "loss": 0.6195,
      "step": 146
    },
    {
      "epoch": 1.2173913043478262,
      "grad_norm": 0.4487763047218323,
      "learning_rate": 1.2601101082383917e-05,
      "loss": 0.6075,
      "step": 147
    },
    {
      "epoch": 1.225672877846791,
      "grad_norm": 0.5483669638633728,
      "learning_rate": 1.2159498346654094e-05,
      "loss": 0.6045,
      "step": 148
    },
    {
      "epoch": 1.2339544513457557,
      "grad_norm": 0.573219358921051,
      "learning_rate": 1.1724139976015306e-05,
      "loss": 0.7711,
      "step": 149
    },
    {
      "epoch": 1.2422360248447206,
      "grad_norm": 0.6374213099479675,
      "learning_rate": 1.1295144993099068e-05,
      "loss": 0.6176,
      "step": 150
    },
    {
      "epoch": 1.2422360248447206,
      "eval_loss": 0.8175904750823975,
      "eval_runtime": 32.7299,
      "eval_samples_per_second": 6.233,
      "eval_steps_per_second": 1.558,
      "step": 150
    },
    {
      "epoch": 1.2505175983436854,
      "grad_norm": 0.5926390290260315,
      "learning_rate": 1.0872630680850196e-05,
      "loss": 0.8074,
      "step": 151
    },
    {
      "epoch": 1.25879917184265,
      "grad_norm": 0.2985449433326721,
      "learning_rate": 1.0456712550462898e-05,
      "loss": 0.5224,
      "step": 152
    },
    {
      "epoch": 1.2670807453416149,
      "grad_norm": 0.3578867018222809,
      "learning_rate": 1.0047504309801104e-05,
      "loss": 0.6502,
      "step": 153
    },
    {
      "epoch": 1.2753623188405796,
      "grad_norm": 0.3537825047969818,
      "learning_rate": 9.645117832311886e-06,
      "loss": 0.6731,
      "step": 154
    },
    {
      "epoch": 1.2836438923395446,
      "grad_norm": 0.3358495533466339,
      "learning_rate": 9.249663126440394e-06,
      "loss": 0.6796,
      "step": 155
    },
    {
      "epoch": 1.2919254658385093,
      "grad_norm": 0.29126879572868347,
      "learning_rate": 8.861248305554624e-06,
      "loss": 0.5348,
      "step": 156
    },
    {
      "epoch": 1.300207039337474,
      "grad_norm": 0.317704439163208,
      "learning_rate": 8.47997955838829e-06,
      "loss": 0.6091,
      "step": 157
    },
    {
      "epoch": 1.308488612836439,
      "grad_norm": 0.320115864276886,
      "learning_rate": 8.10596112000994e-06,
      "loss": 0.6371,
      "step": 158
    },
    {
      "epoch": 1.3167701863354038,
      "grad_norm": 0.33530059456825256,
      "learning_rate": 7.739295243326067e-06,
      "loss": 0.6329,
      "step": 159
    },
    {
      "epoch": 1.3250517598343685,
      "grad_norm": 0.31264767050743103,
      "learning_rate": 7.380082171126228e-06,
      "loss": 0.654,
      "step": 160
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.32048940658569336,
      "learning_rate": 7.028420108677635e-06,
      "loss": 0.5732,
      "step": 161
    },
    {
      "epoch": 1.341614906832298,
      "grad_norm": 0.33851999044418335,
      "learning_rate": 6.684405196876842e-06,
      "loss": 0.686,
      "step": 162
    },
    {
      "epoch": 1.349896480331263,
      "grad_norm": 0.32517871260643005,
      "learning_rate": 6.3481314859657675e-06,
      "loss": 0.65,
      "step": 163
    },
    {
      "epoch": 1.3581780538302277,
      "grad_norm": 0.2995445430278778,
      "learning_rate": 6.019690909819298e-06,
      "loss": 0.5915,
      "step": 164
    },
    {
      "epoch": 1.3664596273291925,
      "grad_norm": 0.3606933653354645,
      "learning_rate": 5.6991732608115e-06,
      "loss": 0.7172,
      "step": 165
    },
    {
      "epoch": 1.3747412008281574,
      "grad_norm": 0.3630608320236206,
      "learning_rate": 5.386666165267256e-06,
      "loss": 0.675,
      "step": 166
    },
    {
      "epoch": 1.3830227743271222,
      "grad_norm": 0.36110809445381165,
      "learning_rate": 5.08225505950613e-06,
      "loss": 0.7061,
      "step": 167
    },
    {
      "epoch": 1.391304347826087,
      "grad_norm": 0.3950647711753845,
      "learning_rate": 4.786023166484913e-06,
      "loss": 0.673,
      "step": 168
    },
    {
      "epoch": 1.3995859213250519,
      "grad_norm": 0.3615090847015381,
      "learning_rate": 4.498051473045291e-06,
      "loss": 0.6181,
      "step": 169
    },
    {
      "epoch": 1.4078674948240166,
      "grad_norm": 0.3696867823600769,
      "learning_rate": 4.218418707772886e-06,
      "loss": 0.6945,
      "step": 170
    },
    {
      "epoch": 1.4161490683229814,
      "grad_norm": 0.3751198947429657,
      "learning_rate": 3.947201319473587e-06,
      "loss": 0.6095,
      "step": 171
    },
    {
      "epoch": 1.4244306418219461,
      "grad_norm": 0.4392836093902588,
      "learning_rate": 3.684473456273278e-06,
      "loss": 0.7707,
      "step": 172
    },
    {
      "epoch": 1.4327122153209109,
      "grad_norm": 0.38536399602890015,
      "learning_rate": 3.4303069453464383e-06,
      "loss": 0.5751,
      "step": 173
    },
    {
      "epoch": 1.4409937888198758,
      "grad_norm": 0.44222986698150635,
      "learning_rate": 3.184771273279312e-06,
      "loss": 0.6717,
      "step": 174
    },
    {
      "epoch": 1.4492753623188406,
      "grad_norm": 0.3905293047428131,
      "learning_rate": 2.947933567072987e-06,
      "loss": 0.5321,
      "step": 175
    },
    {
      "epoch": 1.4575569358178053,
      "grad_norm": 0.49432647228240967,
      "learning_rate": 2.719858575791534e-06,
      "loss": 0.6326,
      "step": 176
    },
    {
      "epoch": 1.4658385093167703,
      "grad_norm": 0.49581611156463623,
      "learning_rate": 2.500608652860256e-06,
      "loss": 0.6167,
      "step": 177
    },
    {
      "epoch": 1.474120082815735,
      "grad_norm": 0.5105964541435242,
      "learning_rate": 2.2902437390188737e-06,
      "loss": 0.6004,
      "step": 178
    },
    {
      "epoch": 1.4824016563146998,
      "grad_norm": 0.6480532884597778,
      "learning_rate": 2.0888213459343587e-06,
      "loss": 0.5838,
      "step": 179
    },
    {
      "epoch": 1.4906832298136645,
      "grad_norm": 0.7326604723930359,
      "learning_rate": 1.8963965404777875e-06,
      "loss": 0.6759,
      "step": 180
    },
    {
      "epoch": 1.4989648033126293,
      "grad_norm": 0.6053556799888611,
      "learning_rate": 1.7130219296696263e-06,
      "loss": 0.6138,
      "step": 181
    },
    {
      "epoch": 1.5072463768115942,
      "grad_norm": 0.21714286506175995,
      "learning_rate": 1.5387476462974824e-06,
      "loss": 0.5251,
      "step": 182
    },
    {
      "epoch": 1.515527950310559,
      "grad_norm": 0.2275223731994629,
      "learning_rate": 1.3736213352103147e-06,
      "loss": 0.6187,
      "step": 183
    },
    {
      "epoch": 1.5238095238095237,
      "grad_norm": 0.2726026177406311,
      "learning_rate": 1.2176881402928002e-06,
      "loss": 0.7148,
      "step": 184
    },
    {
      "epoch": 1.5320910973084887,
      "grad_norm": 0.22267615795135498,
      "learning_rate": 1.0709906921234367e-06,
      "loss": 0.4634,
      "step": 185
    },
    {
      "epoch": 1.5403726708074534,
      "grad_norm": 0.23520763218402863,
      "learning_rate": 9.33569096319799e-07,
      "loss": 0.6049,
      "step": 186
    },
    {
      "epoch": 1.5486542443064182,
      "grad_norm": 0.2739216387271881,
      "learning_rate": 8.054609225740255e-07,
      "loss": 0.784,
      "step": 187
    },
    {
      "epoch": 1.5569358178053831,
      "grad_norm": 0.27384185791015625,
      "learning_rate": 6.867011943816724e-07,
      "loss": 0.6121,
      "step": 188
    },
    {
      "epoch": 1.5652173913043477,
      "grad_norm": 0.3003735840320587,
      "learning_rate": 5.77322379466617e-07,
      "loss": 0.6644,
      "step": 189
    },
    {
      "epoch": 1.5734989648033126,
      "grad_norm": 0.34010234475135803,
      "learning_rate": 4.773543809047186e-07,
      "loss": 0.6084,
      "step": 190
    },
    {
      "epoch": 1.5817805383022774,
      "grad_norm": 0.29969656467437744,
      "learning_rate": 3.868245289486027e-07,
      "loss": 0.6366,
      "step": 191
    },
    {
      "epoch": 1.5900621118012421,
      "grad_norm": 0.29340919852256775,
      "learning_rate": 3.0575757355586817e-07,
      "loss": 0.638,
      "step": 192
    },
    {
      "epoch": 1.598343685300207,
      "grad_norm": 0.32281142473220825,
      "learning_rate": 2.3417567762266497e-07,
      "loss": 0.6753,
      "step": 193
    },
    {
      "epoch": 1.6066252587991718,
      "grad_norm": 0.3351536989212036,
      "learning_rate": 1.7209841092460043e-07,
      "loss": 0.6692,
      "step": 194
    },
    {
      "epoch": 1.6149068322981366,
      "grad_norm": 0.44075658917427063,
      "learning_rate": 1.1954274476655534e-07,
      "loss": 0.603,
      "step": 195
    },
    {
      "epoch": 1.6231884057971016,
      "grad_norm": 0.3639785945415497,
      "learning_rate": 7.652304734289127e-08,
      "loss": 0.6775,
      "step": 196
    },
    {
      "epoch": 1.6314699792960663,
      "grad_norm": 0.36761367321014404,
      "learning_rate": 4.30510798093342e-08,
      "loss": 0.7417,
      "step": 197
    },
    {
      "epoch": 1.639751552795031,
      "grad_norm": 0.3425098657608032,
      "learning_rate": 1.9135993067588284e-08,
      "loss": 0.6121,
      "step": 198
    },
    {
      "epoch": 1.648033126293996,
      "grad_norm": 0.38983044028282166,
      "learning_rate": 4.784325263584854e-09,
      "loss": 0.6343,
      "step": 199
    },
    {
      "epoch": 1.6563146997929605,
      "grad_norm": 0.37634265422821045,
      "learning_rate": 0.0,
      "loss": 0.6523,
      "step": 200
    },
    {
      "epoch": 1.6563146997929605,
      "eval_loss": 0.8045907020568848,
      "eval_runtime": 32.2688,
      "eval_samples_per_second": 6.322,
      "eval_steps_per_second": 1.58,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.558232004284908e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|