{
  "best_metric": 12.421979904174805,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 3.0,
  "eval_steps": 50,
  "global_step": 153,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0196078431372549,
      "grad_norm": 0.011493449099361897,
      "learning_rate": 1.0100000000000002e-05,
      "loss": 12.4571,
      "step": 1
    },
    {
      "epoch": 0.0196078431372549,
      "eval_loss": 12.453781127929688,
      "eval_runtime": 0.7609,
      "eval_samples_per_second": 904.211,
      "eval_steps_per_second": 28.914,
      "step": 1
    },
    {
      "epoch": 0.0392156862745098,
      "grad_norm": 0.010760590434074402,
      "learning_rate": 2.0200000000000003e-05,
      "loss": 12.457,
      "step": 2
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 0.010758738964796066,
      "learning_rate": 3.0299999999999998e-05,
      "loss": 12.4561,
      "step": 3
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 0.011239373125135899,
      "learning_rate": 4.0400000000000006e-05,
      "loss": 12.4552,
      "step": 4
    },
    {
      "epoch": 0.09803921568627451,
      "grad_norm": 0.011815620586276054,
      "learning_rate": 5.05e-05,
      "loss": 12.4542,
      "step": 5
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.012975395657122135,
      "learning_rate": 6.0599999999999996e-05,
      "loss": 12.4538,
      "step": 6
    },
    {
      "epoch": 0.13725490196078433,
      "grad_norm": 0.012917593121528625,
      "learning_rate": 7.07e-05,
      "loss": 12.4582,
      "step": 7
    },
    {
      "epoch": 0.1568627450980392,
      "grad_norm": 0.009971358813345432,
      "learning_rate": 8.080000000000001e-05,
      "loss": 12.4562,
      "step": 8
    },
    {
      "epoch": 0.17647058823529413,
      "grad_norm": 0.010108977556228638,
      "learning_rate": 9.09e-05,
      "loss": 12.4525,
      "step": 9
    },
    {
      "epoch": 0.19607843137254902,
      "grad_norm": 0.010624304413795471,
      "learning_rate": 0.000101,
      "loss": 12.457,
      "step": 10
    },
    {
      "epoch": 0.21568627450980393,
      "grad_norm": 0.011670473963022232,
      "learning_rate": 0.0001002937062937063,
      "loss": 12.4541,
      "step": 11
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.013510797172784805,
      "learning_rate": 9.958741258741259e-05,
      "loss": 12.4538,
      "step": 12
    },
    {
      "epoch": 0.2549019607843137,
      "grad_norm": 0.014974705874919891,
      "learning_rate": 9.888111888111889e-05,
      "loss": 12.4582,
      "step": 13
    },
    {
      "epoch": 0.27450980392156865,
      "grad_norm": 0.012437070719897747,
      "learning_rate": 9.817482517482517e-05,
      "loss": 12.4567,
      "step": 14
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 0.010862158611416817,
      "learning_rate": 9.746853146853147e-05,
      "loss": 12.4558,
      "step": 15
    },
    {
      "epoch": 0.3137254901960784,
      "grad_norm": 0.012195524759590626,
      "learning_rate": 9.676223776223776e-05,
      "loss": 12.4559,
      "step": 16
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.013258482329547405,
      "learning_rate": 9.605594405594406e-05,
      "loss": 12.4546,
      "step": 17
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 0.015779396519064903,
      "learning_rate": 9.534965034965035e-05,
      "loss": 12.4562,
      "step": 18
    },
    {
      "epoch": 0.37254901960784315,
      "grad_norm": 0.016349563375115395,
      "learning_rate": 9.464335664335665e-05,
      "loss": 12.455,
      "step": 19
    },
    {
      "epoch": 0.39215686274509803,
      "grad_norm": 0.015274745412170887,
      "learning_rate": 9.393706293706294e-05,
      "loss": 12.4534,
      "step": 20
    },
    {
      "epoch": 0.4117647058823529,
      "grad_norm": 0.014877895824611187,
      "learning_rate": 9.323076923076924e-05,
      "loss": 12.4522,
      "step": 21
    },
    {
      "epoch": 0.43137254901960786,
      "grad_norm": 0.016367629170417786,
      "learning_rate": 9.252447552447552e-05,
      "loss": 12.4548,
      "step": 22
    },
    {
      "epoch": 0.45098039215686275,
      "grad_norm": 0.02055247500538826,
      "learning_rate": 9.181818181818182e-05,
      "loss": 12.4551,
      "step": 23
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.023789655417203903,
      "learning_rate": 9.111188811188811e-05,
      "loss": 12.4569,
      "step": 24
    },
    {
      "epoch": 0.49019607843137253,
      "grad_norm": 0.025931550189852715,
      "learning_rate": 9.04055944055944e-05,
      "loss": 12.4531,
      "step": 25
    },
    {
      "epoch": 0.5098039215686274,
      "grad_norm": 0.01956542581319809,
      "learning_rate": 8.96993006993007e-05,
      "loss": 12.4564,
      "step": 26
    },
    {
      "epoch": 0.5294117647058824,
      "grad_norm": 0.02145051211118698,
      "learning_rate": 8.8993006993007e-05,
      "loss": 12.452,
      "step": 27
    },
    {
      "epoch": 0.5490196078431373,
      "grad_norm": 0.024554729461669922,
      "learning_rate": 8.828671328671329e-05,
      "loss": 12.4567,
      "step": 28
    },
    {
      "epoch": 0.5686274509803921,
      "grad_norm": 0.028146198019385338,
      "learning_rate": 8.758041958041957e-05,
      "loss": 12.4539,
      "step": 29
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 0.03251597285270691,
      "learning_rate": 8.687412587412587e-05,
      "loss": 12.4546,
      "step": 30
    },
    {
      "epoch": 0.6078431372549019,
      "grad_norm": 0.039151743054389954,
      "learning_rate": 8.616783216783216e-05,
      "loss": 12.456,
      "step": 31
    },
    {
      "epoch": 0.6274509803921569,
      "grad_norm": 0.030295705422759056,
      "learning_rate": 8.546153846153846e-05,
      "loss": 12.4572,
      "step": 32
    },
    {
      "epoch": 0.6470588235294118,
      "grad_norm": 0.029417822137475014,
      "learning_rate": 8.475524475524476e-05,
      "loss": 12.457,
      "step": 33
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.03694408759474754,
      "learning_rate": 8.404895104895105e-05,
      "loss": 12.4547,
      "step": 34
    },
    {
      "epoch": 0.6862745098039216,
      "grad_norm": 0.04618909955024719,
      "learning_rate": 8.334265734265735e-05,
      "loss": 12.4523,
      "step": 35
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.054282210767269135,
      "learning_rate": 8.263636363636364e-05,
      "loss": 12.453,
      "step": 36
    },
    {
      "epoch": 0.7254901960784313,
      "grad_norm": 0.06975057721138,
      "learning_rate": 8.193006993006992e-05,
      "loss": 12.4552,
      "step": 37
    },
    {
      "epoch": 0.7450980392156863,
      "grad_norm": 0.048148512840270996,
      "learning_rate": 8.122377622377622e-05,
      "loss": 12.4534,
      "step": 38
    },
    {
      "epoch": 0.7647058823529411,
      "grad_norm": 0.04206012561917305,
      "learning_rate": 8.051748251748251e-05,
      "loss": 12.4553,
      "step": 39
    },
    {
      "epoch": 0.7843137254901961,
      "grad_norm": 0.05756961926817894,
      "learning_rate": 7.981118881118881e-05,
      "loss": 12.4519,
      "step": 40
    },
    {
      "epoch": 0.803921568627451,
      "grad_norm": 0.061868276447057724,
      "learning_rate": 7.91048951048951e-05,
      "loss": 12.4548,
      "step": 41
    },
    {
      "epoch": 0.8235294117647058,
      "grad_norm": 0.07480663061141968,
      "learning_rate": 7.83986013986014e-05,
      "loss": 12.4546,
      "step": 42
    },
    {
      "epoch": 0.8431372549019608,
      "grad_norm": 0.0910768136382103,
      "learning_rate": 7.76923076923077e-05,
      "loss": 12.4513,
      "step": 43
    },
    {
      "epoch": 0.8627450980392157,
      "grad_norm": 0.08593326061964035,
      "learning_rate": 7.698601398601398e-05,
      "loss": 12.449,
      "step": 44
    },
    {
      "epoch": 0.8823529411764706,
      "grad_norm": 0.06495288014411926,
      "learning_rate": 7.627972027972027e-05,
      "loss": 12.4546,
      "step": 45
    },
    {
      "epoch": 0.9019607843137255,
      "grad_norm": 0.08239670842885971,
      "learning_rate": 7.557342657342657e-05,
      "loss": 12.4522,
      "step": 46
    },
    {
      "epoch": 0.9215686274509803,
      "grad_norm": 0.10047987848520279,
      "learning_rate": 7.486713286713286e-05,
      "loss": 12.4537,
      "step": 47
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.10524290055036545,
      "learning_rate": 7.416083916083916e-05,
      "loss": 12.45,
      "step": 48
    },
    {
      "epoch": 0.9607843137254902,
      "grad_norm": 0.1392570436000824,
      "learning_rate": 7.345454545454545e-05,
      "loss": 12.4494,
      "step": 49
    },
    {
      "epoch": 0.9803921568627451,
      "grad_norm": 0.18665827810764313,
      "learning_rate": 7.274825174825175e-05,
      "loss": 12.447,
      "step": 50
    },
    {
      "epoch": 0.9803921568627451,
      "eval_loss": 12.44759750366211,
      "eval_runtime": 0.791,
      "eval_samples_per_second": 869.826,
      "eval_steps_per_second": 27.814,
      "step": 50
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.11416830867528915,
      "learning_rate": 7.204195804195805e-05,
      "loss": 12.453,
      "step": 51
    },
    {
      "epoch": 1.0196078431372548,
      "grad_norm": 0.08539585024118423,
      "learning_rate": 7.133566433566433e-05,
      "loss": 12.4519,
      "step": 52
    },
    {
      "epoch": 1.0392156862745099,
      "grad_norm": 0.11319365352392197,
      "learning_rate": 7.062937062937062e-05,
      "loss": 12.448,
      "step": 53
    },
    {
      "epoch": 1.0588235294117647,
      "grad_norm": 0.14616349339485168,
      "learning_rate": 6.992307692307692e-05,
      "loss": 12.4479,
      "step": 54
    },
    {
      "epoch": 1.0784313725490196,
      "grad_norm": 0.15833082795143127,
      "learning_rate": 6.921678321678321e-05,
      "loss": 12.4471,
      "step": 55
    },
    {
      "epoch": 1.0980392156862746,
      "grad_norm": 0.19201552867889404,
      "learning_rate": 6.851048951048951e-05,
      "loss": 12.4455,
      "step": 56
    },
    {
      "epoch": 1.1176470588235294,
      "grad_norm": 0.23073577880859375,
      "learning_rate": 6.78041958041958e-05,
      "loss": 12.442,
      "step": 57
    },
    {
      "epoch": 1.1372549019607843,
      "grad_norm": 0.13355469703674316,
      "learning_rate": 6.70979020979021e-05,
      "loss": 12.4496,
      "step": 58
    },
    {
      "epoch": 1.156862745098039,
      "grad_norm": 0.15398786962032318,
      "learning_rate": 6.639160839160838e-05,
      "loss": 12.4465,
      "step": 59
    },
    {
      "epoch": 1.1764705882352942,
      "grad_norm": 0.1663445234298706,
      "learning_rate": 6.568531468531468e-05,
      "loss": 12.4482,
      "step": 60
    },
    {
      "epoch": 1.196078431372549,
      "grad_norm": 0.2090064436197281,
      "learning_rate": 6.497902097902097e-05,
      "loss": 12.4475,
      "step": 61
    },
    {
      "epoch": 1.215686274509804,
      "grad_norm": 0.2399904429912567,
      "learning_rate": 6.427272727272727e-05,
      "loss": 12.4436,
      "step": 62
    },
    {
      "epoch": 1.2352941176470589,
      "grad_norm": 0.24960361421108246,
      "learning_rate": 6.356643356643356e-05,
      "loss": 12.444,
      "step": 63
    },
    {
      "epoch": 1.2549019607843137,
      "grad_norm": 0.18903537094593048,
      "learning_rate": 6.286013986013986e-05,
      "loss": 12.4457,
      "step": 64
    },
    {
      "epoch": 1.2745098039215685,
      "grad_norm": 0.16718223690986633,
      "learning_rate": 6.215384615384615e-05,
      "loss": 12.4481,
      "step": 65
    },
    {
      "epoch": 1.2941176470588236,
      "grad_norm": 0.2060553878545761,
      "learning_rate": 6.144755244755245e-05,
      "loss": 12.4452,
      "step": 66
    },
    {
      "epoch": 1.3137254901960784,
      "grad_norm": 0.22543393075466156,
      "learning_rate": 6.074125874125874e-05,
      "loss": 12.4413,
      "step": 67
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.27220749855041504,
      "learning_rate": 6.0034965034965033e-05,
      "loss": 12.4421,
      "step": 68
    },
    {
      "epoch": 1.3529411764705883,
      "grad_norm": 0.2713835835456848,
      "learning_rate": 5.932867132867133e-05,
      "loss": 12.4424,
      "step": 69
    },
    {
      "epoch": 1.3725490196078431,
      "grad_norm": 0.23676316440105438,
      "learning_rate": 5.8622377622377624e-05,
      "loss": 12.4398,
      "step": 70
    },
    {
      "epoch": 1.392156862745098,
      "grad_norm": 0.1754506677389145,
      "learning_rate": 5.791608391608392e-05,
      "loss": 12.4448,
      "step": 71
    },
    {
      "epoch": 1.4117647058823528,
      "grad_norm": 0.2169537991285324,
      "learning_rate": 5.7209790209790215e-05,
      "loss": 12.4407,
      "step": 72
    },
    {
      "epoch": 1.4313725490196079,
      "grad_norm": 0.2186908721923828,
      "learning_rate": 5.650349650349651e-05,
      "loss": 12.4367,
      "step": 73
    },
    {
      "epoch": 1.4509803921568627,
      "grad_norm": 0.24410758912563324,
      "learning_rate": 5.579720279720279e-05,
      "loss": 12.4392,
      "step": 74
    },
    {
      "epoch": 1.4705882352941178,
      "grad_norm": 0.2627074122428894,
      "learning_rate": 5.509090909090909e-05,
      "loss": 12.4316,
      "step": 75
    },
    {
      "epoch": 1.4901960784313726,
      "grad_norm": 0.2872616946697235,
      "learning_rate": 5.438461538461538e-05,
      "loss": 12.4323,
      "step": 76
    },
    {
      "epoch": 1.5098039215686274,
      "grad_norm": 0.13781462609767914,
      "learning_rate": 5.367832167832168e-05,
      "loss": 12.443,
      "step": 77
    },
    {
      "epoch": 1.5294117647058822,
      "grad_norm": 0.19649550318717957,
      "learning_rate": 5.2972027972027974e-05,
      "loss": 12.4353,
      "step": 78
    },
    {
      "epoch": 1.5490196078431373,
      "grad_norm": 0.19505800306797028,
      "learning_rate": 5.226573426573427e-05,
      "loss": 12.4398,
      "step": 79
    },
    {
      "epoch": 1.5686274509803921,
      "grad_norm": 0.20190319418907166,
      "learning_rate": 5.1559440559440565e-05,
      "loss": 12.4362,
      "step": 80
    },
    {
      "epoch": 1.5882352941176472,
      "grad_norm": 0.18724848330020905,
      "learning_rate": 5.085314685314686e-05,
      "loss": 12.434,
      "step": 81
    },
    {
      "epoch": 1.607843137254902,
      "grad_norm": 0.2352747619152069,
      "learning_rate": 5.014685314685315e-05,
      "loss": 12.4278,
      "step": 82
    },
    {
      "epoch": 1.6274509803921569,
      "grad_norm": 0.14110158383846283,
      "learning_rate": 4.9440559440559444e-05,
      "loss": 12.4396,
      "step": 83
    },
    {
      "epoch": 1.6470588235294117,
      "grad_norm": 0.1315547674894333,
      "learning_rate": 4.873426573426573e-05,
      "loss": 12.4396,
      "step": 84
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.13215842843055725,
      "learning_rate": 4.802797202797203e-05,
      "loss": 12.4354,
      "step": 85
    },
    {
      "epoch": 1.6862745098039216,
      "grad_norm": 0.14507846534252167,
      "learning_rate": 4.7321678321678324e-05,
      "loss": 12.4371,
      "step": 86
    },
    {
      "epoch": 1.7058823529411766,
      "grad_norm": 0.1558404117822647,
      "learning_rate": 4.661538461538462e-05,
      "loss": 12.4326,
      "step": 87
    },
    {
      "epoch": 1.7254901960784315,
      "grad_norm": 0.16582131385803223,
      "learning_rate": 4.590909090909091e-05,
      "loss": 12.4294,
      "step": 88
    },
    {
      "epoch": 1.7450980392156863,
      "grad_norm": 0.12328918278217316,
      "learning_rate": 4.52027972027972e-05,
      "loss": 12.4346,
      "step": 89
    },
    {
      "epoch": 1.7647058823529411,
      "grad_norm": 0.10181323438882828,
      "learning_rate": 4.44965034965035e-05,
      "loss": 12.4404,
      "step": 90
    },
    {
      "epoch": 1.784313725490196,
      "grad_norm": 0.11123471707105637,
      "learning_rate": 4.379020979020979e-05,
      "loss": 12.435,
      "step": 91
    },
    {
      "epoch": 1.803921568627451,
      "grad_norm": 0.11661916226148605,
      "learning_rate": 4.308391608391608e-05,
      "loss": 12.4312,
      "step": 92
    },
    {
      "epoch": 1.8235294117647058,
      "grad_norm": 0.09935862571001053,
      "learning_rate": 4.237762237762238e-05,
      "loss": 12.4318,
      "step": 93
    },
    {
      "epoch": 1.843137254901961,
      "grad_norm": 0.12922266125679016,
      "learning_rate": 4.167132867132867e-05,
      "loss": 12.4327,
      "step": 94
    },
    {
      "epoch": 1.8627450980392157,
      "grad_norm": 0.1282566338777542,
      "learning_rate": 4.096503496503496e-05,
      "loss": 12.4311,
      "step": 95
    },
    {
      "epoch": 1.8823529411764706,
      "grad_norm": 0.07856478542089462,
      "learning_rate": 4.025874125874126e-05,
      "loss": 12.4363,
      "step": 96
    },
    {
      "epoch": 1.9019607843137254,
      "grad_norm": 0.09109707176685333,
      "learning_rate": 3.955244755244755e-05,
      "loss": 12.4274,
      "step": 97
    },
    {
      "epoch": 1.9215686274509802,
      "grad_norm": 0.09043984115123749,
      "learning_rate": 3.884615384615385e-05,
      "loss": 12.4345,
      "step": 98
    },
    {
      "epoch": 1.9411764705882353,
      "grad_norm": 0.09672176837921143,
      "learning_rate": 3.8139860139860137e-05,
      "loss": 12.4294,
      "step": 99
    },
    {
      "epoch": 1.9607843137254903,
      "grad_norm": 0.104476198554039,
      "learning_rate": 3.743356643356643e-05,
      "loss": 12.4295,
      "step": 100
    },
    {
      "epoch": 1.9607843137254903,
      "eval_loss": 12.426092147827148,
      "eval_runtime": 0.7943,
      "eval_samples_per_second": 866.219,
      "eval_steps_per_second": 27.699,
      "step": 100
    },
    {
      "epoch": 1.9803921568627452,
      "grad_norm": 0.12800978124141693,
      "learning_rate": 3.672727272727273e-05,
      "loss": 12.4221,
      "step": 101
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.09714271128177643,
      "learning_rate": 3.602097902097902e-05,
      "loss": 12.4288,
      "step": 102
    },
    {
      "epoch": 2.019607843137255,
      "grad_norm": 0.07660553604364395,
      "learning_rate": 3.531468531468531e-05,
      "loss": 12.4372,
      "step": 103
    },
    {
      "epoch": 2.0392156862745097,
      "grad_norm": 0.07365182787179947,
      "learning_rate": 3.460839160839161e-05,
      "loss": 12.4342,
      "step": 104
    },
    {
      "epoch": 2.0588235294117645,
      "grad_norm": 0.08602866530418396,
      "learning_rate": 3.39020979020979e-05,
      "loss": 12.4331,
      "step": 105
    },
    {
      "epoch": 2.0784313725490198,
      "grad_norm": 0.09166587144136429,
      "learning_rate": 3.319580419580419e-05,
      "loss": 12.4296,
      "step": 106
    },
    {
      "epoch": 2.0980392156862746,
      "grad_norm": 0.0854063555598259,
      "learning_rate": 3.2489510489510486e-05,
      "loss": 12.4279,
      "step": 107
    },
    {
      "epoch": 2.1176470588235294,
      "grad_norm": 0.13535353541374207,
      "learning_rate": 3.178321678321678e-05,
      "loss": 12.4169,
      "step": 108
    },
    {
      "epoch": 2.1372549019607843,
      "grad_norm": 0.08088745176792145,
      "learning_rate": 3.107692307692308e-05,
      "loss": 12.432,
      "step": 109
    },
    {
      "epoch": 2.156862745098039,
      "grad_norm": 0.06271250545978546,
      "learning_rate": 3.037062937062937e-05,
      "loss": 12.4333,
      "step": 110
    },
    {
      "epoch": 2.176470588235294,
      "grad_norm": 0.07285593450069427,
      "learning_rate": 2.9664335664335664e-05,
      "loss": 12.4325,
      "step": 111
    },
    {
      "epoch": 2.196078431372549,
      "grad_norm": 0.06813765317201614,
      "learning_rate": 2.895804195804196e-05,
      "loss": 12.4286,
      "step": 112
    },
    {
      "epoch": 2.215686274509804,
      "grad_norm": 0.09649086743593216,
      "learning_rate": 2.8251748251748255e-05,
      "loss": 12.4253,
      "step": 113
    },
    {
      "epoch": 2.235294117647059,
      "grad_norm": 0.10359369218349457,
      "learning_rate": 2.7545454545454544e-05,
      "loss": 12.424,
      "step": 114
    },
    {
      "epoch": 2.2549019607843137,
      "grad_norm": 0.08479335904121399,
      "learning_rate": 2.683916083916084e-05,
      "loss": 12.43,
      "step": 115
    },
    {
      "epoch": 2.2745098039215685,
      "grad_norm": 0.061394382268190384,
      "learning_rate": 2.6132867132867135e-05,
      "loss": 12.4359,
      "step": 116
    },
    {
      "epoch": 2.2941176470588234,
      "grad_norm": 0.08093006908893585,
      "learning_rate": 2.542657342657343e-05,
      "loss": 12.4279,
      "step": 117
    },
    {
      "epoch": 2.313725490196078,
      "grad_norm": 0.0796235054731369,
      "learning_rate": 2.4720279720279722e-05,
      "loss": 12.4266,
      "step": 118
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.07345807552337646,
      "learning_rate": 2.4013986013986014e-05,
      "loss": 12.4301,
      "step": 119
    },
    {
      "epoch": 2.3529411764705883,
      "grad_norm": 0.08076856285333633,
      "learning_rate": 2.330769230769231e-05,
      "loss": 12.4252,
      "step": 120
    },
    {
      "epoch": 2.372549019607843,
      "grad_norm": 0.07977347820997238,
      "learning_rate": 2.26013986013986e-05,
      "loss": 12.4287,
      "step": 121
    },
    {
      "epoch": 2.392156862745098,
      "grad_norm": 0.05769919231534004,
      "learning_rate": 2.1895104895104893e-05,
      "loss": 12.4378,
      "step": 122
    },
    {
      "epoch": 2.411764705882353,
      "grad_norm": 0.061103228479623795,
      "learning_rate": 2.118881118881119e-05,
      "loss": 12.431,
      "step": 123
    },
    {
      "epoch": 2.431372549019608,
      "grad_norm": 0.07140219211578369,
      "learning_rate": 2.048251748251748e-05,
      "loss": 12.4293,
      "step": 124
    },
    {
      "epoch": 2.450980392156863,
      "grad_norm": 0.07010167092084885,
      "learning_rate": 1.9776223776223776e-05,
      "loss": 12.4295,
      "step": 125
    },
    {
      "epoch": 2.4705882352941178,
      "grad_norm": 0.07127075642347336,
      "learning_rate": 1.9069930069930068e-05,
      "loss": 12.4234,
      "step": 126
    },
    {
      "epoch": 2.4901960784313726,
      "grad_norm": 0.08472342044115067,
      "learning_rate": 1.8363636363636364e-05,
      "loss": 12.4188,
      "step": 127
    },
    {
      "epoch": 2.5098039215686274,
      "grad_norm": 0.06202932819724083,
      "learning_rate": 1.7657342657342656e-05,
      "loss": 12.4329,
      "step": 128
    },
    {
      "epoch": 2.5294117647058822,
      "grad_norm": 0.0649908259510994,
      "learning_rate": 1.695104895104895e-05,
      "loss": 12.429,
      "step": 129
    },
    {
      "epoch": 2.549019607843137,
      "grad_norm": 0.06271278858184814,
      "learning_rate": 1.6244755244755243e-05,
      "loss": 12.428,
      "step": 130
    },
    {
      "epoch": 2.568627450980392,
      "grad_norm": 0.06860791891813278,
      "learning_rate": 1.553846153846154e-05,
      "loss": 12.4257,
      "step": 131
    },
    {
      "epoch": 2.588235294117647,
      "grad_norm": 0.06407037377357483,
      "learning_rate": 1.4832167832167832e-05,
      "loss": 12.4273,
      "step": 132
    },
    {
      "epoch": 2.607843137254902,
      "grad_norm": 0.07560574263334274,
      "learning_rate": 1.4125874125874128e-05,
      "loss": 12.4194,
      "step": 133
    },
    {
      "epoch": 2.627450980392157,
      "grad_norm": 0.07092591375112534,
      "learning_rate": 1.341958041958042e-05,
      "loss": 12.4329,
      "step": 134
    },
    {
      "epoch": 2.6470588235294117,
      "grad_norm": 0.06500810384750366,
      "learning_rate": 1.2713286713286715e-05,
      "loss": 12.4296,
      "step": 135
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.05897223949432373,
      "learning_rate": 1.2006993006993007e-05,
      "loss": 12.4299,
      "step": 136
    },
    {
      "epoch": 2.686274509803922,
      "grad_norm": 0.06739623844623566,
      "learning_rate": 1.13006993006993e-05,
      "loss": 12.4266,
      "step": 137
    },
    {
      "epoch": 2.7058823529411766,
      "grad_norm": 0.06975847482681274,
      "learning_rate": 1.0594405594405594e-05,
      "loss": 12.4196,
      "step": 138
    },
    {
      "epoch": 2.7254901960784315,
      "grad_norm": 0.0807594507932663,
      "learning_rate": 9.888111888111888e-06,
      "loss": 12.4253,
      "step": 139
    },
    {
      "epoch": 2.7450980392156863,
      "grad_norm": 0.06619778275489807,
      "learning_rate": 9.181818181818182e-06,
      "loss": 12.4315,
      "step": 140
    },
    {
      "epoch": 2.764705882352941,
      "grad_norm": 0.06023673340678215,
      "learning_rate": 8.475524475524476e-06,
      "loss": 12.4321,
      "step": 141
    },
    {
      "epoch": 2.784313725490196,
      "grad_norm": 0.06361662596464157,
      "learning_rate": 7.76923076923077e-06,
      "loss": 12.4256,
      "step": 142
    },
    {
      "epoch": 2.803921568627451,
      "grad_norm": 0.07309895008802414,
      "learning_rate": 7.062937062937064e-06,
      "loss": 12.4306,
      "step": 143
    },
    {
      "epoch": 2.8235294117647056,
      "grad_norm": 0.0623571053147316,
      "learning_rate": 6.3566433566433575e-06,
      "loss": 12.4294,
      "step": 144
    },
    {
      "epoch": 2.843137254901961,
      "grad_norm": 0.0636533796787262,
      "learning_rate": 5.65034965034965e-06,
      "loss": 12.4264,
      "step": 145
    },
    {
      "epoch": 2.8627450980392157,
      "grad_norm": 0.08216053992509842,
      "learning_rate": 4.944055944055944e-06,
      "loss": 12.4216,
      "step": 146
    },
    {
      "epoch": 2.8823529411764706,
      "grad_norm": 0.06114799901843071,
      "learning_rate": 4.237762237762238e-06,
      "loss": 12.4324,
      "step": 147
    },
    {
      "epoch": 2.9019607843137254,
      "grad_norm": 0.06452635675668716,
      "learning_rate": 3.531468531468532e-06,
      "loss": 12.4265,
      "step": 148
    },
    {
      "epoch": 2.9215686274509802,
      "grad_norm": 0.06488998979330063,
      "learning_rate": 2.825174825174825e-06,
      "loss": 12.4244,
      "step": 149
    },
    {
      "epoch": 2.9411764705882355,
      "grad_norm": 0.06650799512863159,
      "learning_rate": 2.118881118881119e-06,
      "loss": 12.4224,
      "step": 150
    },
    {
      "epoch": 2.9411764705882355,
      "eval_loss": 12.421979904174805,
      "eval_runtime": 0.7954,
      "eval_samples_per_second": 864.993,
      "eval_steps_per_second": 27.66,
      "step": 150
    },
    {
      "epoch": 2.9607843137254903,
      "grad_norm": 0.059388499706983566,
      "learning_rate": 1.4125874125874126e-06,
      "loss": 12.4267,
      "step": 151
    },
    {
      "epoch": 2.980392156862745,
      "grad_norm": 0.09108266234397888,
      "learning_rate": 7.062937062937063e-07,
      "loss": 12.4191,
      "step": 152
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.06379084289073944,
      "learning_rate": 0.0,
      "loss": 12.4279,
      "step": 153
    }
  ],
  "logging_steps": 1,
  "max_steps": 153,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4720554934272.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}