{
  "best_metric": 0.6183362007141113,
  "best_model_checkpoint": "governancev1/checkpoint-310",
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 310,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04838709677419355,
      "grad_norm": 3.0842931270599365,
      "learning_rate": 9.67741935483871e-07,
      "loss": 1.765,
      "step": 3
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 3.169724702835083,
      "learning_rate": 1.935483870967742e-06,
      "loss": 1.805,
      "step": 6
    },
    {
      "epoch": 0.14516129032258066,
      "grad_norm": 3.4454946517944336,
      "learning_rate": 2.903225806451613e-06,
      "loss": 1.805,
      "step": 9
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 2.817859649658203,
      "learning_rate": 3.870967741935484e-06,
      "loss": 1.8016,
      "step": 12
    },
    {
      "epoch": 0.24193548387096775,
      "grad_norm": 4.190126419067383,
      "learning_rate": 4.838709677419355e-06,
      "loss": 1.7943,
      "step": 15
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 3.602416515350342,
      "learning_rate": 5.806451612903226e-06,
      "loss": 1.784,
      "step": 18
    },
    {
      "epoch": 0.3387096774193548,
      "grad_norm": 3.737297773361206,
      "learning_rate": 6.774193548387097e-06,
      "loss": 1.7862,
      "step": 21
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 3.9539337158203125,
      "learning_rate": 7.741935483870968e-06,
      "loss": 1.8032,
      "step": 24
    },
    {
      "epoch": 0.43548387096774194,
      "grad_norm": 3.630708932876587,
      "learning_rate": 8.70967741935484e-06,
      "loss": 1.7825,
      "step": 27
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 2.1453819274902344,
      "learning_rate": 9.67741935483871e-06,
      "loss": 1.8035,
      "step": 30
    },
    {
      "epoch": 0.532258064516129,
      "grad_norm": 3.154428243637085,
      "learning_rate": 9.928315412186382e-06,
      "loss": 1.778,
      "step": 33
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 2.720580816268921,
      "learning_rate": 9.820788530465952e-06,
      "loss": 1.7578,
      "step": 36
    },
    {
      "epoch": 0.6290322580645161,
      "grad_norm": 2.502385139465332,
      "learning_rate": 9.71326164874552e-06,
      "loss": 1.7854,
      "step": 39
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 4.814332485198975,
      "learning_rate": 9.60573476702509e-06,
      "loss": 1.7794,
      "step": 42
    },
    {
      "epoch": 0.7258064516129032,
      "grad_norm": 4.313498497009277,
      "learning_rate": 9.49820788530466e-06,
      "loss": 1.7557,
      "step": 45
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 5.430189609527588,
      "learning_rate": 9.39068100358423e-06,
      "loss": 1.778,
      "step": 48
    },
    {
      "epoch": 0.8225806451612904,
      "grad_norm": 3.3871169090270996,
      "learning_rate": 9.2831541218638e-06,
      "loss": 1.7578,
      "step": 51
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 3.650548219680786,
      "learning_rate": 9.17562724014337e-06,
      "loss": 1.7563,
      "step": 54
    },
    {
      "epoch": 0.9193548387096774,
      "grad_norm": 2.9331789016723633,
      "learning_rate": 9.068100358422939e-06,
      "loss": 1.7639,
      "step": 57
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 4.592463970184326,
      "learning_rate": 8.96057347670251e-06,
      "loss": 1.7155,
      "step": 60
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.5853658536585366,
      "eval_f1_macro": 0.5538981611199739,
      "eval_f1_micro": 0.5853658536585366,
      "eval_f1_weighted": 0.5579311937490847,
      "eval_loss": 1.721036434173584,
      "eval_precision_macro": 0.6902878530637443,
      "eval_precision_micro": 0.5853658536585366,
      "eval_precision_weighted": 0.6892010083860678,
      "eval_recall_macro": 0.5789682539682539,
      "eval_recall_micro": 0.5853658536585366,
      "eval_recall_weighted": 0.5853658536585366,
      "eval_runtime": 0.706,
      "eval_samples_per_second": 174.221,
      "eval_steps_per_second": 11.331,
      "step": 62
    },
    {
      "epoch": 1.0161290322580645,
      "grad_norm": 3.955127477645874,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.7336,
      "step": 63
    },
    {
      "epoch": 1.064516129032258,
      "grad_norm": 3.8463294506073,
      "learning_rate": 8.78136200716846e-06,
      "loss": 1.7033,
      "step": 66
    },
    {
      "epoch": 1.1129032258064515,
      "grad_norm": 5.382518768310547,
      "learning_rate": 8.67383512544803e-06,
      "loss": 1.7024,
      "step": 69
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 5.656961441040039,
      "learning_rate": 8.5663082437276e-06,
      "loss": 1.6256,
      "step": 72
    },
    {
      "epoch": 1.2096774193548387,
      "grad_norm": 4.990180492401123,
      "learning_rate": 8.45878136200717e-06,
      "loss": 1.6167,
      "step": 75
    },
    {
      "epoch": 1.2580645161290323,
      "grad_norm": 6.717923164367676,
      "learning_rate": 8.35125448028674e-06,
      "loss": 1.6665,
      "step": 78
    },
    {
      "epoch": 1.3064516129032258,
      "grad_norm": 6.241459846496582,
      "learning_rate": 8.24372759856631e-06,
      "loss": 1.5133,
      "step": 81
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 7.923880100250244,
      "learning_rate": 8.136200716845879e-06,
      "loss": 1.4838,
      "step": 84
    },
    {
      "epoch": 1.403225806451613,
      "grad_norm": 6.954166889190674,
      "learning_rate": 8.028673835125449e-06,
      "loss": 1.5632,
      "step": 87
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 8.967968940734863,
      "learning_rate": 7.921146953405019e-06,
      "loss": 1.5475,
      "step": 90
    },
    {
      "epoch": 1.5,
      "grad_norm": 7.038327693939209,
      "learning_rate": 7.813620071684589e-06,
      "loss": 1.3987,
      "step": 93
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 5.91029167175293,
      "learning_rate": 7.706093189964159e-06,
      "loss": 1.4075,
      "step": 96
    },
    {
      "epoch": 1.596774193548387,
      "grad_norm": 5.255452632904053,
      "learning_rate": 7.5985663082437275e-06,
      "loss": 1.4156,
      "step": 99
    },
    {
      "epoch": 1.6451612903225805,
      "grad_norm": 6.4161858558654785,
      "learning_rate": 7.491039426523297e-06,
      "loss": 1.4282,
      "step": 102
    },
    {
      "epoch": 1.6935483870967742,
      "grad_norm": 6.7246270179748535,
      "learning_rate": 7.383512544802868e-06,
      "loss": 1.3218,
      "step": 105
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 7.8033671379089355,
      "learning_rate": 7.275985663082438e-06,
      "loss": 1.3164,
      "step": 108
    },
    {
      "epoch": 1.7903225806451613,
      "grad_norm": 7.699241638183594,
      "learning_rate": 7.168458781362008e-06,
      "loss": 1.2532,
      "step": 111
    },
    {
      "epoch": 1.838709677419355,
      "grad_norm": 6.70715856552124,
      "learning_rate": 7.060931899641578e-06,
      "loss": 1.2094,
      "step": 114
    },
    {
      "epoch": 1.8870967741935485,
      "grad_norm": 6.230452060699463,
      "learning_rate": 6.9534050179211476e-06,
      "loss": 1.3213,
      "step": 117
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 5.785920143127441,
      "learning_rate": 6.8458781362007174e-06,
      "loss": 1.2513,
      "step": 120
    },
    {
      "epoch": 1.9838709677419355,
      "grad_norm": 4.86808967590332,
      "learning_rate": 6.738351254480287e-06,
      "loss": 1.1626,
      "step": 123
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7154471544715447,
      "eval_f1_macro": 0.6924669353531142,
      "eval_f1_micro": 0.7154471544715447,
      "eval_f1_weighted": 0.6954298446465806,
      "eval_loss": 1.1070369482040405,
      "eval_precision_macro": 0.7450460829493087,
      "eval_precision_micro": 0.7154471544715447,
      "eval_precision_weighted": 0.7457963358435428,
      "eval_recall_macro": 0.7115079365079365,
      "eval_recall_micro": 0.7154471544715447,
      "eval_recall_weighted": 0.7154471544715447,
      "eval_runtime": 0.72,
      "eval_samples_per_second": 170.83,
      "eval_steps_per_second": 11.111,
      "step": 124
    },
    {
      "epoch": 2.032258064516129,
      "grad_norm": 4.532644748687744,
      "learning_rate": 6.630824372759857e-06,
      "loss": 0.9685,
      "step": 126
    },
    {
      "epoch": 2.0806451612903225,
      "grad_norm": 5.968006134033203,
      "learning_rate": 6.523297491039428e-06,
      "loss": 1.2298,
      "step": 129
    },
    {
      "epoch": 2.129032258064516,
      "grad_norm": 5.891108989715576,
      "learning_rate": 6.415770609318996e-06,
      "loss": 1.1047,
      "step": 132
    },
    {
      "epoch": 2.1774193548387095,
      "grad_norm": 5.493295192718506,
      "learning_rate": 6.308243727598567e-06,
      "loss": 1.0527,
      "step": 135
    },
    {
      "epoch": 2.225806451612903,
      "grad_norm": 5.447644233703613,
      "learning_rate": 6.200716845878137e-06,
      "loss": 0.8857,
      "step": 138
    },
    {
      "epoch": 2.274193548387097,
      "grad_norm": 5.241428852081299,
      "learning_rate": 6.0931899641577065e-06,
      "loss": 0.9137,
      "step": 141
    },
    {
      "epoch": 2.3225806451612905,
      "grad_norm": 7.360194683074951,
      "learning_rate": 5.985663082437276e-06,
      "loss": 1.1215,
      "step": 144
    },
    {
      "epoch": 2.370967741935484,
      "grad_norm": 8.852798461914062,
      "learning_rate": 5.878136200716846e-06,
      "loss": 1.0209,
      "step": 147
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 8.410216331481934,
      "learning_rate": 5.770609318996416e-06,
      "loss": 1.0577,
      "step": 150
    },
    {
      "epoch": 2.467741935483871,
      "grad_norm": 8.898208618164062,
      "learning_rate": 5.663082437275986e-06,
      "loss": 0.9082,
      "step": 153
    },
    {
      "epoch": 2.5161290322580645,
      "grad_norm": 6.314516067504883,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.79,
      "step": 156
    },
    {
      "epoch": 2.564516129032258,
      "grad_norm": 7.746774673461914,
      "learning_rate": 5.4480286738351265e-06,
      "loss": 1.0791,
      "step": 159
    },
    {
      "epoch": 2.6129032258064515,
      "grad_norm": 5.784223556518555,
      "learning_rate": 5.340501792114696e-06,
      "loss": 0.8398,
      "step": 162
    },
    {
      "epoch": 2.661290322580645,
      "grad_norm": 6.672528266906738,
      "learning_rate": 5.232974910394266e-06,
      "loss": 0.8742,
      "step": 165
    },
    {
      "epoch": 2.709677419354839,
      "grad_norm": 6.1544647216796875,
      "learning_rate": 5.125448028673835e-06,
      "loss": 0.9219,
      "step": 168
    },
    {
      "epoch": 2.758064516129032,
      "grad_norm": 6.208500862121582,
      "learning_rate": 5.017921146953405e-06,
      "loss": 0.8854,
      "step": 171
    },
    {
      "epoch": 2.806451612903226,
      "grad_norm": 7.67927885055542,
      "learning_rate": 4.910394265232976e-06,
      "loss": 0.7603,
      "step": 174
    },
    {
      "epoch": 2.8548387096774195,
      "grad_norm": 9.24992561340332,
      "learning_rate": 4.802867383512545e-06,
      "loss": 0.819,
      "step": 177
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 7.481741428375244,
      "learning_rate": 4.695340501792115e-06,
      "loss": 0.8486,
      "step": 180
    },
    {
      "epoch": 2.9516129032258065,
      "grad_norm": 8.279362678527832,
      "learning_rate": 4.587813620071685e-06,
      "loss": 0.7911,
      "step": 183
    },
    {
      "epoch": 3.0,
      "grad_norm": 6.6752448081970215,
      "learning_rate": 4.480286738351255e-06,
      "loss": 0.5699,
      "step": 186
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.7804878048780488,
      "eval_f1_macro": 0.7723212747631352,
      "eval_f1_micro": 0.7804878048780488,
      "eval_f1_weighted": 0.7737375265225521,
      "eval_loss": 0.7607462406158447,
      "eval_precision_macro": 0.7946850438417155,
      "eval_precision_micro": 0.7804878048780488,
      "eval_precision_weighted": 0.7944935514504388,
      "eval_recall_macro": 0.7777777777777777,
      "eval_recall_micro": 0.7804878048780488,
      "eval_recall_weighted": 0.7804878048780488,
      "eval_runtime": 0.7085,
      "eval_samples_per_second": 173.617,
      "eval_steps_per_second": 11.292,
      "step": 186
    },
    {
      "epoch": 3.0483870967741935,
      "grad_norm": 6.760964393615723,
      "learning_rate": 4.372759856630825e-06,
      "loss": 0.727,
      "step": 189
    },
    {
      "epoch": 3.096774193548387,
      "grad_norm": 5.095468044281006,
      "learning_rate": 4.265232974910394e-06,
      "loss": 0.6731,
      "step": 192
    },
    {
      "epoch": 3.1451612903225805,
      "grad_norm": 9.518759727478027,
      "learning_rate": 4.157706093189964e-06,
      "loss": 0.8496,
      "step": 195
    },
    {
      "epoch": 3.193548387096774,
      "grad_norm": 6.321383953094482,
      "learning_rate": 4.050179211469534e-06,
      "loss": 0.7368,
      "step": 198
    },
    {
      "epoch": 3.241935483870968,
      "grad_norm": 5.962444305419922,
      "learning_rate": 3.942652329749105e-06,
      "loss": 0.6389,
      "step": 201
    },
    {
      "epoch": 3.2903225806451615,
      "grad_norm": 6.869384765625,
      "learning_rate": 3.8351254480286745e-06,
      "loss": 0.6852,
      "step": 204
    },
    {
      "epoch": 3.338709677419355,
      "grad_norm": 7.892813205718994,
      "learning_rate": 3.7275985663082444e-06,
      "loss": 0.7319,
      "step": 207
    },
    {
      "epoch": 3.3870967741935485,
      "grad_norm": 9.712183952331543,
      "learning_rate": 3.620071684587814e-06,
      "loss": 0.7502,
      "step": 210
    },
    {
      "epoch": 3.435483870967742,
      "grad_norm": 5.5098795890808105,
      "learning_rate": 3.5125448028673837e-06,
      "loss": 0.4983,
      "step": 213
    },
    {
      "epoch": 3.4838709677419355,
      "grad_norm": 6.6952619552612305,
      "learning_rate": 3.4050179211469536e-06,
      "loss": 0.6215,
      "step": 216
    },
    {
      "epoch": 3.532258064516129,
      "grad_norm": 8.5636625289917,
      "learning_rate": 3.2974910394265234e-06,
      "loss": 0.7535,
      "step": 219
    },
    {
      "epoch": 3.5806451612903225,
      "grad_norm": 6.691638946533203,
      "learning_rate": 3.1899641577060937e-06,
      "loss": 0.8351,
      "step": 222
    },
    {
      "epoch": 3.629032258064516,
      "grad_norm": 7.349640369415283,
      "learning_rate": 3.0824372759856636e-06,
      "loss": 0.6259,
      "step": 225
    },
    {
      "epoch": 3.6774193548387095,
      "grad_norm": 6.965548515319824,
      "learning_rate": 2.974910394265233e-06,
      "loss": 0.7365,
      "step": 228
    },
    {
      "epoch": 3.725806451612903,
      "grad_norm": 5.973796844482422,
      "learning_rate": 2.867383512544803e-06,
      "loss": 0.669,
      "step": 231
    },
    {
      "epoch": 3.774193548387097,
      "grad_norm": 6.919253826141357,
      "learning_rate": 2.7598566308243727e-06,
      "loss": 0.6993,
      "step": 234
    },
    {
      "epoch": 3.8225806451612905,
      "grad_norm": 6.3397908210754395,
      "learning_rate": 2.652329749103943e-06,
      "loss": 0.7181,
      "step": 237
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 5.785044193267822,
      "learning_rate": 2.544802867383513e-06,
      "loss": 0.4398,
      "step": 240
    },
    {
      "epoch": 3.9193548387096775,
      "grad_norm": 5.988158226013184,
      "learning_rate": 2.4372759856630828e-06,
      "loss": 0.6283,
      "step": 243
    },
    {
      "epoch": 3.967741935483871,
      "grad_norm": 5.8424973487854,
      "learning_rate": 2.3297491039426526e-06,
      "loss": 0.579,
      "step": 246
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8130081300813008,
      "eval_f1_macro": 0.808283912469959,
      "eval_f1_micro": 0.8130081300813008,
      "eval_f1_weighted": 0.8093660017426324,
      "eval_loss": 0.6489768624305725,
      "eval_precision_macro": 0.8281224884633707,
      "eval_precision_micro": 0.8130081300813008,
      "eval_precision_weighted": 0.8282651571090791,
      "eval_recall_macro": 0.8111111111111112,
      "eval_recall_micro": 0.8130081300813008,
      "eval_recall_weighted": 0.8130081300813008,
      "eval_runtime": 0.7293,
      "eval_samples_per_second": 168.664,
      "eval_steps_per_second": 10.97,
      "step": 248
    },
    {
      "epoch": 4.016129032258065,
      "grad_norm": 6.205898761749268,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.6333,
      "step": 249
    },
    {
      "epoch": 4.064516129032258,
      "grad_norm": 10.515931129455566,
      "learning_rate": 2.1146953405017924e-06,
      "loss": 0.8123,
      "step": 252
    },
    {
      "epoch": 4.112903225806452,
      "grad_norm": 8.230704307556152,
      "learning_rate": 2.0071684587813622e-06,
      "loss": 0.4461,
      "step": 255
    },
    {
      "epoch": 4.161290322580645,
      "grad_norm": 5.984982490539551,
      "learning_rate": 1.8996415770609319e-06,
      "loss": 0.6573,
      "step": 258
    },
    {
      "epoch": 4.209677419354839,
      "grad_norm": 9.903221130371094,
      "learning_rate": 1.792114695340502e-06,
      "loss": 0.7067,
      "step": 261
    },
    {
      "epoch": 4.258064516129032,
      "grad_norm": 5.222007751464844,
      "learning_rate": 1.6845878136200718e-06,
      "loss": 0.4508,
      "step": 264
    },
    {
      "epoch": 4.306451612903226,
      "grad_norm": 6.386465072631836,
      "learning_rate": 1.5770609318996417e-06,
      "loss": 0.566,
      "step": 267
    },
    {
      "epoch": 4.354838709677419,
      "grad_norm": 9.981949806213379,
      "learning_rate": 1.4695340501792116e-06,
      "loss": 0.6265,
      "step": 270
    },
    {
      "epoch": 4.403225806451613,
      "grad_norm": 5.25981330871582,
      "learning_rate": 1.3620071684587816e-06,
      "loss": 0.4726,
      "step": 273
    },
    {
      "epoch": 4.451612903225806,
      "grad_norm": 6.81918478012085,
      "learning_rate": 1.2544802867383513e-06,
      "loss": 0.5897,
      "step": 276
    },
    {
      "epoch": 4.5,
      "grad_norm": 4.158679008483887,
      "learning_rate": 1.1469534050179212e-06,
      "loss": 0.4612,
      "step": 279
    },
    {
      "epoch": 4.548387096774194,
      "grad_norm": 5.195897579193115,
      "learning_rate": 1.039426523297491e-06,
      "loss": 0.6686,
      "step": 282
    },
    {
      "epoch": 4.596774193548387,
      "grad_norm": 4.113834857940674,
      "learning_rate": 9.318996415770611e-07,
      "loss": 0.4569,
      "step": 285
    },
    {
      "epoch": 4.645161290322581,
      "grad_norm": 6.790894031524658,
      "learning_rate": 8.243727598566309e-07,
      "loss": 0.6289,
      "step": 288
    },
    {
      "epoch": 4.693548387096774,
      "grad_norm": 5.653354644775391,
      "learning_rate": 7.168458781362007e-07,
      "loss": 0.4998,
      "step": 291
    },
    {
      "epoch": 4.741935483870968,
      "grad_norm": 14.646848678588867,
      "learning_rate": 6.093189964157707e-07,
      "loss": 0.6761,
      "step": 294
    },
    {
      "epoch": 4.790322580645161,
      "grad_norm": 5.255918025970459,
      "learning_rate": 5.017921146953406e-07,
      "loss": 0.5684,
      "step": 297
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 5.386866569519043,
      "learning_rate": 3.942652329749104e-07,
      "loss": 0.4544,
      "step": 300
    },
    {
      "epoch": 4.887096774193548,
      "grad_norm": 8.672043800354004,
      "learning_rate": 2.867383512544803e-07,
      "loss": 0.6241,
      "step": 303
    },
    {
      "epoch": 4.935483870967742,
      "grad_norm": 7.205950736999512,
      "learning_rate": 1.7921146953405018e-07,
      "loss": 0.5675,
      "step": 306
    },
    {
      "epoch": 4.983870967741936,
      "grad_norm": 12.01904582977295,
      "learning_rate": 7.168458781362007e-08,
      "loss": 0.5988,
      "step": 309
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8211382113821138,
      "eval_f1_macro": 0.8169326711187176,
      "eval_f1_micro": 0.8211382113821138,
      "eval_f1_weighted": 0.8178385589956776,
      "eval_loss": 0.6183362007141113,
      "eval_precision_macro": 0.8425385791952508,
      "eval_precision_micro": 0.8211382113821138,
      "eval_precision_weighted": 0.8430523097652948,
      "eval_recall_macro": 0.8198412698412699,
      "eval_recall_micro": 0.8211382113821138,
      "eval_recall_weighted": 0.8211382113821138,
      "eval_runtime": 0.723,
      "eval_samples_per_second": 170.115,
      "eval_steps_per_second": 11.064,
      "step": 310
    }
  ],
  "logging_steps": 3,
  "max_steps": 310,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 323905893672960.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}