|
{
  "best_metric": 37.93360828750764,
  "best_model_checkpoint": "/kaggle/working/whisper-small-fa/checkpoint-1200",
  "epoch": 0.5862237420615535,
  "eval_steps": 200,
  "global_step": 1200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004885197850512946,
      "grad_norm": Infinity,
      "learning_rate": 0.0,
      "loss": 1.0812,
      "step": 1
    },
    {
      "epoch": 0.004885197850512946,
      "grad_norm": 7.0439229011535645,
      "learning_rate": 1.6e-07,
      "loss": 1.1303,
      "step": 10
    },
    {
      "epoch": 0.009770395701025891,
      "grad_norm": 6.462230205535889,
      "learning_rate": 3.6e-07,
      "loss": 1.1685,
      "step": 20
    },
    {
      "epoch": 0.014655593551538837,
      "grad_norm": 6.182502269744873,
      "learning_rate": 5.6e-07,
      "loss": 1.1928,
      "step": 30
    },
    {
      "epoch": 0.019540791402051783,
      "grad_norm": 5.509102821350098,
      "learning_rate": 7.6e-07,
      "loss": 1.0699,
      "step": 40
    },
    {
      "epoch": 0.024425989252564728,
      "grad_norm": 6.310844421386719,
      "learning_rate": 9.600000000000001e-07,
      "loss": 1.1117,
      "step": 50
    },
    {
      "epoch": 0.029311187103077674,
      "grad_norm": 4.859102725982666,
      "learning_rate": 1.1600000000000001e-06,
      "loss": 1.0681,
      "step": 60
    },
    {
      "epoch": 0.03419638495359062,
      "grad_norm": 5.078814506530762,
      "learning_rate": 1.3600000000000001e-06,
      "loss": 1.0524,
      "step": 70
    },
    {
      "epoch": 0.039081582804103565,
      "grad_norm": 5.108152866363525,
      "learning_rate": 1.56e-06,
      "loss": 0.9862,
      "step": 80
    },
    {
      "epoch": 0.04396678065461651,
      "grad_norm": 6.09797477722168,
      "learning_rate": 1.76e-06,
      "loss": 1.0003,
      "step": 90
    },
    {
      "epoch": 0.048851978505129456,
      "grad_norm": 6.775981426239014,
      "learning_rate": 1.9600000000000003e-06,
      "loss": 0.9876,
      "step": 100
    },
    {
      "epoch": 0.0537371763556424,
      "grad_norm": 4.734734535217285,
      "learning_rate": 2.16e-06,
      "loss": 0.9026,
      "step": 110
    },
    {
      "epoch": 0.05862237420615535,
      "grad_norm": 5.119976043701172,
      "learning_rate": 2.3600000000000003e-06,
      "loss": 0.9783,
      "step": 120
    },
    {
      "epoch": 0.0635075720566683,
      "grad_norm": 4.984339714050293,
      "learning_rate": 2.56e-06,
      "loss": 0.9671,
      "step": 130
    },
    {
      "epoch": 0.06839276990718124,
      "grad_norm": 5.38236665725708,
      "learning_rate": 2.7600000000000003e-06,
      "loss": 0.9263,
      "step": 140
    },
    {
      "epoch": 0.07327796775769418,
      "grad_norm": 4.587165355682373,
      "learning_rate": 2.96e-06,
      "loss": 0.9063,
      "step": 150
    },
    {
      "epoch": 0.07816316560820713,
      "grad_norm": 4.583350658416748,
      "learning_rate": 3.1600000000000002e-06,
      "loss": 0.84,
      "step": 160
    },
    {
      "epoch": 0.08304836345872008,
      "grad_norm": 4.574127674102783,
      "learning_rate": 3.3600000000000004e-06,
      "loss": 0.833,
      "step": 170
    },
    {
      "epoch": 0.08793356130923302,
      "grad_norm": 4.4891486167907715,
      "learning_rate": 3.5600000000000002e-06,
      "loss": 0.7965,
      "step": 180
    },
    {
      "epoch": 0.09281875915974597,
      "grad_norm": 4.560169696807861,
      "learning_rate": 3.7600000000000004e-06,
      "loss": 0.7703,
      "step": 190
    },
    {
      "epoch": 0.09770395701025891,
      "grad_norm": 5.059728145599365,
      "learning_rate": 3.96e-06,
      "loss": 0.8702,
      "step": 200
    },
    {
      "epoch": 0.09770395701025891,
      "eval_loss": 0.7939865589141846,
      "eval_runtime": 4279.5784,
      "eval_samples_per_second": 0.956,
      "eval_steps_per_second": 0.12,
      "eval_wer": 55.69627284341499,
      "step": 200
    },
    {
      "epoch": 0.10258915486077186,
      "grad_norm": 4.930312156677246,
      "learning_rate": 4.16e-06,
      "loss": 0.7978,
      "step": 210
    },
    {
      "epoch": 0.1074743527112848,
      "grad_norm": 4.10357666015625,
      "learning_rate": 4.360000000000001e-06,
      "loss": 0.7512,
      "step": 220
    },
    {
      "epoch": 0.11235955056179775,
      "grad_norm": 4.250857830047607,
      "learning_rate": 4.56e-06,
      "loss": 0.7242,
      "step": 230
    },
    {
      "epoch": 0.1172447484123107,
      "grad_norm": 4.367275714874268,
      "learning_rate": 4.76e-06,
      "loss": 0.7473,
      "step": 240
    },
    {
      "epoch": 0.12212994626282364,
      "grad_norm": 5.22398042678833,
      "learning_rate": 4.960000000000001e-06,
      "loss": 0.8216,
      "step": 250
    },
    {
      "epoch": 0.1270151441133366,
      "grad_norm": 4.164461135864258,
      "learning_rate": 5.1600000000000006e-06,
      "loss": 0.6435,
      "step": 260
    },
    {
      "epoch": 0.13190034196384953,
      "grad_norm": 5.729864120483398,
      "learning_rate": 5.36e-06,
      "loss": 0.7428,
      "step": 270
    },
    {
      "epoch": 0.13678553981436248,
      "grad_norm": 4.261190891265869,
      "learning_rate": 5.560000000000001e-06,
      "loss": 0.7081,
      "step": 280
    },
    {
      "epoch": 0.14167073766487542,
      "grad_norm": 3.940690279006958,
      "learning_rate": 5.76e-06,
      "loss": 0.63,
      "step": 290
    },
    {
      "epoch": 0.14655593551538837,
      "grad_norm": 3.8406989574432373,
      "learning_rate": 5.9600000000000005e-06,
      "loss": 0.6711,
      "step": 300
    },
    {
      "epoch": 0.15144113336590131,
      "grad_norm": 3.9562718868255615,
      "learning_rate": 6.16e-06,
      "loss": 0.6992,
      "step": 310
    },
    {
      "epoch": 0.15632633121641426,
      "grad_norm": 4.3271589279174805,
      "learning_rate": 6.360000000000001e-06,
      "loss": 0.7054,
      "step": 320
    },
    {
      "epoch": 0.1612115290669272,
      "grad_norm": 4.195059776306152,
      "learning_rate": 6.560000000000001e-06,
      "loss": 0.6424,
      "step": 330
    },
    {
      "epoch": 0.16609672691744015,
      "grad_norm": 4.491285800933838,
      "learning_rate": 6.760000000000001e-06,
      "loss": 0.7025,
      "step": 340
    },
    {
      "epoch": 0.1709819247679531,
      "grad_norm": 5.474099636077881,
      "learning_rate": 6.96e-06,
      "loss": 0.6373,
      "step": 350
    },
    {
      "epoch": 0.17586712261846604,
      "grad_norm": 4.050487995147705,
      "learning_rate": 7.16e-06,
      "loss": 0.5873,
      "step": 360
    },
    {
      "epoch": 0.180752320468979,
      "grad_norm": 5.067830562591553,
      "learning_rate": 7.360000000000001e-06,
      "loss": 0.6215,
      "step": 370
    },
    {
      "epoch": 0.18563751831949193,
      "grad_norm": 4.530464172363281,
      "learning_rate": 7.5600000000000005e-06,
      "loss": 0.6653,
      "step": 380
    },
    {
      "epoch": 0.19052271617000488,
      "grad_norm": 3.94427490234375,
      "learning_rate": 7.76e-06,
      "loss": 0.6384,
      "step": 390
    },
    {
      "epoch": 0.19540791402051783,
      "grad_norm": 4.769630432128906,
      "learning_rate": 7.960000000000002e-06,
      "loss": 0.6221,
      "step": 400
    },
    {
      "epoch": 0.19540791402051783,
      "eval_loss": 0.6273319721221924,
      "eval_runtime": 4292.0244,
      "eval_samples_per_second": 0.954,
      "eval_steps_per_second": 0.119,
      "eval_wer": 46.51759428984058,
      "step": 400
    },
    {
      "epoch": 0.20029311187103077,
      "grad_norm": 5.084413528442383,
      "learning_rate": 8.16e-06,
      "loss": 0.6848,
      "step": 410
    },
    {
      "epoch": 0.20517830972154372,
      "grad_norm": 4.525864124298096,
      "learning_rate": 8.36e-06,
      "loss": 0.6234,
      "step": 420
    },
    {
      "epoch": 0.21006350757205666,
      "grad_norm": 4.084338188171387,
      "learning_rate": 8.560000000000001e-06,
      "loss": 0.6272,
      "step": 430
    },
    {
      "epoch": 0.2149487054225696,
      "grad_norm": 4.2073187828063965,
      "learning_rate": 8.76e-06,
      "loss": 0.6075,
      "step": 440
    },
    {
      "epoch": 0.21983390327308255,
      "grad_norm": 3.7465310096740723,
      "learning_rate": 8.96e-06,
      "loss": 0.5911,
      "step": 450
    },
    {
      "epoch": 0.2247191011235955,
      "grad_norm": 3.5266239643096924,
      "learning_rate": 9.16e-06,
      "loss": 0.5499,
      "step": 460
    },
    {
      "epoch": 0.22960429897410845,
      "grad_norm": 4.592888832092285,
      "learning_rate": 9.360000000000002e-06,
      "loss": 0.6124,
      "step": 470
    },
    {
      "epoch": 0.2344894968246214,
      "grad_norm": 4.9025726318359375,
      "learning_rate": 9.56e-06,
      "loss": 0.5697,
      "step": 480
    },
    {
      "epoch": 0.23937469467513434,
      "grad_norm": 4.163031101226807,
      "learning_rate": 9.760000000000001e-06,
      "loss": 0.6106,
      "step": 490
    },
    {
      "epoch": 0.24425989252564728,
      "grad_norm": 5.005125045776367,
      "learning_rate": 9.960000000000001e-06,
      "loss": 0.6138,
      "step": 500
    },
    {
      "epoch": 0.24914509037616023,
      "grad_norm": 3.767075538635254,
      "learning_rate": 9.982222222222224e-06,
      "loss": 0.601,
      "step": 510
    },
    {
      "epoch": 0.2540302882266732,
      "grad_norm": 4.501799583435059,
      "learning_rate": 9.960000000000001e-06,
      "loss": 0.5664,
      "step": 520
    },
    {
      "epoch": 0.2589154860771861,
      "grad_norm": 4.360989570617676,
      "learning_rate": 9.937777777777779e-06,
      "loss": 0.5782,
      "step": 530
    },
    {
      "epoch": 0.26380068392769906,
      "grad_norm": 3.4221303462982178,
      "learning_rate": 9.915555555555556e-06,
      "loss": 0.585,
      "step": 540
    },
    {
      "epoch": 0.268685881778212,
      "grad_norm": 4.4359540939331055,
      "learning_rate": 9.893333333333334e-06,
      "loss": 0.6203,
      "step": 550
    },
    {
      "epoch": 0.27357107962872496,
      "grad_norm": 4.127051830291748,
      "learning_rate": 9.871111111111112e-06,
      "loss": 0.5032,
      "step": 560
    },
    {
      "epoch": 0.2784562774792379,
      "grad_norm": 4.420245170593262,
      "learning_rate": 9.84888888888889e-06,
      "loss": 0.5203,
      "step": 570
    },
    {
      "epoch": 0.28334147532975085,
      "grad_norm": 3.9797379970550537,
      "learning_rate": 9.826666666666667e-06,
      "loss": 0.5655,
      "step": 580
    },
    {
      "epoch": 0.2882266731802638,
      "grad_norm": 4.368413925170898,
      "learning_rate": 9.804444444444444e-06,
      "loss": 0.5212,
      "step": 590
    },
    {
      "epoch": 0.29311187103077674,
      "grad_norm": 4.136660099029541,
      "learning_rate": 9.782222222222222e-06,
      "loss": 0.5569,
      "step": 600
    },
    {
      "epoch": 0.29311187103077674,
      "eval_loss": 0.5524213910102844,
      "eval_runtime": 4286.0796,
      "eval_samples_per_second": 0.955,
      "eval_steps_per_second": 0.119,
      "eval_wer": 43.251471976892745,
      "step": 600
    },
    {
      "epoch": 0.2979970688812897,
      "grad_norm": 4.450418472290039,
      "learning_rate": 9.760000000000001e-06,
      "loss": 0.5764,
      "step": 610
    },
    {
      "epoch": 0.30288226673180263,
      "grad_norm": 3.492920160293579,
      "learning_rate": 9.737777777777779e-06,
      "loss": 0.5373,
      "step": 620
    },
    {
      "epoch": 0.3077674645823156,
      "grad_norm": 4.476955890655518,
      "learning_rate": 9.715555555555557e-06,
      "loss": 0.4991,
      "step": 630
    },
    {
      "epoch": 0.3126526624328285,
      "grad_norm": 3.3835787773132324,
      "learning_rate": 9.693333333333334e-06,
      "loss": 0.5387,
      "step": 640
    },
    {
      "epoch": 0.31753786028334147,
      "grad_norm": 3.3086416721343994,
      "learning_rate": 9.671111111111112e-06,
      "loss": 0.5414,
      "step": 650
    },
    {
      "epoch": 0.3224230581338544,
      "grad_norm": 4.225744724273682,
      "learning_rate": 9.64888888888889e-06,
      "loss": 0.466,
      "step": 660
    },
    {
      "epoch": 0.32730825598436736,
      "grad_norm": 4.349085807800293,
      "learning_rate": 9.626666666666667e-06,
      "loss": 0.5382,
      "step": 670
    },
    {
      "epoch": 0.3321934538348803,
      "grad_norm": 3.8492863178253174,
      "learning_rate": 9.604444444444445e-06,
      "loss": 0.4948,
      "step": 680
    },
    {
      "epoch": 0.33707865168539325,
      "grad_norm": 3.621649980545044,
      "learning_rate": 9.582222222222222e-06,
      "loss": 0.4967,
      "step": 690
    },
    {
      "epoch": 0.3419638495359062,
      "grad_norm": 4.60444974899292,
      "learning_rate": 9.56e-06,
      "loss": 0.4764,
      "step": 700
    },
    {
      "epoch": 0.34684904738641914,
      "grad_norm": 3.711239814758301,
      "learning_rate": 9.537777777777778e-06,
      "loss": 0.5223,
      "step": 710
    },
    {
      "epoch": 0.3517342452369321,
      "grad_norm": 3.4989352226257324,
      "learning_rate": 9.515555555555557e-06,
      "loss": 0.5236,
      "step": 720
    },
    {
      "epoch": 0.35661944308744503,
      "grad_norm": 3.1489815711975098,
      "learning_rate": 9.493333333333334e-06,
      "loss": 0.5124,
      "step": 730
    },
    {
      "epoch": 0.361504640937958,
      "grad_norm": 4.3361735343933105,
      "learning_rate": 9.471111111111112e-06,
      "loss": 0.5418,
      "step": 740
    },
    {
      "epoch": 0.3663898387884709,
      "grad_norm": 2.7804179191589355,
      "learning_rate": 9.44888888888889e-06,
      "loss": 0.4497,
      "step": 750
    },
    {
      "epoch": 0.37127503663898387,
      "grad_norm": 3.681450605392456,
      "learning_rate": 9.426666666666667e-06,
      "loss": 0.4977,
      "step": 760
    },
    {
      "epoch": 0.3761602344894968,
      "grad_norm": 4.99979829788208,
      "learning_rate": 9.404444444444445e-06,
      "loss": 0.5545,
      "step": 770
    },
    {
      "epoch": 0.38104543234000976,
      "grad_norm": 3.4073972702026367,
      "learning_rate": 9.382222222222223e-06,
      "loss": 0.4368,
      "step": 780
    },
    {
      "epoch": 0.3859306301905227,
      "grad_norm": 3.558271646499634,
      "learning_rate": 9.360000000000002e-06,
      "loss": 0.4994,
      "step": 790
    },
    {
      "epoch": 0.39081582804103565,
      "grad_norm": 3.8624231815338135,
      "learning_rate": 9.33777777777778e-06,
      "loss": 0.4785,
      "step": 800
    },
    {
      "epoch": 0.39081582804103565,
      "eval_loss": 0.5109024047851562,
      "eval_runtime": 4283.004,
      "eval_samples_per_second": 0.956,
      "eval_steps_per_second": 0.12,
      "eval_wer": 40.329389546186746,
      "step": 800
    },
    {
      "epoch": 0.3957010258915486,
      "grad_norm": 5.084819793701172,
      "learning_rate": 9.315555555555557e-06,
      "loss": 0.5334,
      "step": 810
    },
    {
      "epoch": 0.40058622374206154,
      "grad_norm": 4.143725872039795,
      "learning_rate": 9.293333333333335e-06,
      "loss": 0.4547,
      "step": 820
    },
    {
      "epoch": 0.4054714215925745,
      "grad_norm": 4.0059614181518555,
      "learning_rate": 9.271111111111112e-06,
      "loss": 0.5135,
      "step": 830
    },
    {
      "epoch": 0.41035661944308743,
      "grad_norm": 4.0563483238220215,
      "learning_rate": 9.24888888888889e-06,
      "loss": 0.4474,
      "step": 840
    },
    {
      "epoch": 0.4152418172936004,
      "grad_norm": 3.5411062240600586,
      "learning_rate": 9.226666666666668e-06,
      "loss": 0.5204,
      "step": 850
    },
    {
      "epoch": 0.4201270151441133,
      "grad_norm": 3.515293836593628,
      "learning_rate": 9.204444444444445e-06,
      "loss": 0.514,
      "step": 860
    },
    {
      "epoch": 0.42501221299462627,
      "grad_norm": 3.598378896713257,
      "learning_rate": 9.182222222222223e-06,
      "loss": 0.4408,
      "step": 870
    },
    {
      "epoch": 0.4298974108451392,
      "grad_norm": 3.1513373851776123,
      "learning_rate": 9.16e-06,
      "loss": 0.4575,
      "step": 880
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 5.0970377922058105,
      "learning_rate": 9.137777777777778e-06,
      "loss": 0.4901,
      "step": 890
    },
    {
      "epoch": 0.4396678065461651,
      "grad_norm": 4.093622207641602,
      "learning_rate": 9.115555555555556e-06,
      "loss": 0.4744,
      "step": 900
    },
    {
      "epoch": 0.44455300439667805,
      "grad_norm": 4.782330513000488,
      "learning_rate": 9.093333333333333e-06,
      "loss": 0.4592,
      "step": 910
    },
    {
      "epoch": 0.449438202247191,
      "grad_norm": 2.7775630950927734,
      "learning_rate": 9.07111111111111e-06,
      "loss": 0.511,
      "step": 920
    },
    {
      "epoch": 0.45432340009770394,
      "grad_norm": 3.0217251777648926,
      "learning_rate": 9.048888888888888e-06,
      "loss": 0.4973,
      "step": 930
    },
    {
      "epoch": 0.4592085979482169,
      "grad_norm": 4.689064025878906,
      "learning_rate": 9.026666666666666e-06,
      "loss": 0.5676,
      "step": 940
    },
    {
      "epoch": 0.46409379579872984,
      "grad_norm": 3.9128365516662598,
      "learning_rate": 9.004444444444445e-06,
      "loss": 0.4762,
      "step": 950
    },
    {
      "epoch": 0.4689789936492428,
      "grad_norm": 2.82621169090271,
      "learning_rate": 8.982222222222223e-06,
      "loss": 0.4318,
      "step": 960
    },
    {
      "epoch": 0.4738641914997557,
      "grad_norm": 3.4691481590270996,
      "learning_rate": 8.96e-06,
      "loss": 0.4465,
      "step": 970
    },
    {
      "epoch": 0.4787493893502687,
      "grad_norm": 4.63604736328125,
      "learning_rate": 8.937777777777778e-06,
      "loss": 0.4895,
      "step": 980
    },
    {
      "epoch": 0.4836345872007816,
      "grad_norm": 4.685360431671143,
      "learning_rate": 8.915555555555556e-06,
      "loss": 0.5155,
      "step": 990
    },
    {
      "epoch": 0.48851978505129456,
      "grad_norm": 3.411714553833008,
      "learning_rate": 8.893333333333333e-06,
      "loss": 0.5216,
      "step": 1000
    },
    {
      "epoch": 0.48851978505129456,
      "eval_loss": 0.48320430517196655,
      "eval_runtime": 4282.5384,
      "eval_samples_per_second": 0.956,
      "eval_steps_per_second": 0.12,
      "eval_wer": 38.85151085930122,
      "step": 1000
    },
    {
      "epoch": 0.4934049829018075,
      "grad_norm": 3.4970908164978027,
      "learning_rate": 8.871111111111111e-06,
      "loss": 0.4371,
      "step": 1010
    },
    {
      "epoch": 0.49829018075232046,
      "grad_norm": 4.287569522857666,
      "learning_rate": 8.848888888888889e-06,
      "loss": 0.4882,
      "step": 1020
    },
    {
      "epoch": 0.5031753786028335,
      "grad_norm": 4.18141508102417,
      "learning_rate": 8.826666666666668e-06,
      "loss": 0.4955,
      "step": 1030
    },
    {
      "epoch": 0.5080605764533463,
      "grad_norm": 3.6604855060577393,
      "learning_rate": 8.804444444444446e-06,
      "loss": 0.4448,
      "step": 1040
    },
    {
      "epoch": 0.5129457743038593,
      "grad_norm": 4.121953010559082,
      "learning_rate": 8.782222222222223e-06,
      "loss": 0.5134,
      "step": 1050
    },
    {
      "epoch": 0.5178309721543722,
      "grad_norm": 4.355337142944336,
      "learning_rate": 8.76e-06,
      "loss": 0.5069,
      "step": 1060
    },
    {
      "epoch": 0.5227161700048852,
      "grad_norm": 3.3341116905212402,
      "learning_rate": 8.737777777777778e-06,
      "loss": 0.5008,
      "step": 1070
    },
    {
      "epoch": 0.5276013678553981,
      "grad_norm": 4.42513370513916,
      "learning_rate": 8.715555555555556e-06,
      "loss": 0.4557,
      "step": 1080
    },
    {
      "epoch": 0.5324865657059111,
      "grad_norm": 2.6197452545166016,
      "learning_rate": 8.693333333333334e-06,
      "loss": 0.458,
      "step": 1090
    },
    {
      "epoch": 0.537371763556424,
      "grad_norm": 2.7881979942321777,
      "learning_rate": 8.671111111111113e-06,
      "loss": 0.4692,
      "step": 1100
    },
    {
      "epoch": 0.542256961406937,
      "grad_norm": 3.419318914413452,
      "learning_rate": 8.64888888888889e-06,
      "loss": 0.4541,
      "step": 1110
    },
    {
      "epoch": 0.5471421592574499,
      "grad_norm": 3.8821029663085938,
      "learning_rate": 8.626666666666668e-06,
      "loss": 0.5263,
      "step": 1120
    },
    {
      "epoch": 0.5520273571079629,
      "grad_norm": 3.7360761165618896,
      "learning_rate": 8.604444444444446e-06,
      "loss": 0.479,
      "step": 1130
    },
    {
      "epoch": 0.5569125549584758,
      "grad_norm": 3.3444161415100098,
      "learning_rate": 8.582222222222223e-06,
      "loss": 0.4942,
      "step": 1140
    },
    {
      "epoch": 0.5617977528089888,
      "grad_norm": 3.6646621227264404,
      "learning_rate": 8.560000000000001e-06,
      "loss": 0.4129,
      "step": 1150
    },
    {
      "epoch": 0.5666829506595017,
      "grad_norm": 3.4366016387939453,
      "learning_rate": 8.537777777777779e-06,
      "loss": 0.4324,
      "step": 1160
    },
    {
      "epoch": 0.5715681485100147,
      "grad_norm": 3.347890615463257,
      "learning_rate": 8.515555555555556e-06,
      "loss": 0.4797,
      "step": 1170
    },
    {
      "epoch": 0.5764533463605276,
      "grad_norm": 3.3374993801116943,
      "learning_rate": 8.493333333333334e-06,
      "loss": 0.4665,
      "step": 1180
    },
    {
      "epoch": 0.5813385442110406,
      "grad_norm": 3.522141933441162,
      "learning_rate": 8.471111111111112e-06,
      "loss": 0.4141,
      "step": 1190
    },
    {
      "epoch": 0.5862237420615535,
      "grad_norm": 3.1512022018432617,
      "learning_rate": 8.448888888888889e-06,
      "loss": 0.3981,
      "step": 1200
    },
    {
      "epoch": 0.5862237420615535,
      "eval_loss": 0.4652426540851593,
      "eval_runtime": 4278.5694,
      "eval_samples_per_second": 0.957,
      "eval_steps_per_second": 0.12,
      "eval_wer": 37.93360828750764,
      "step": 1200
    }
  ],
  "logging_steps": 10,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.540839686144e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
|
|