ptsft150 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 75.0,
"eval_steps": 1000,
"global_step": 150,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.5,
"grad_norm": 0.0,
"learning_rate": 0,
"loss": 1.3402,
"step": 1
},
{
"epoch": 1.0,
"grad_norm": 0.0,
"learning_rate": 0,
"loss": 1.286,
"step": 2
},
{
"epoch": 1.5,
"grad_norm": 0.0,
"learning_rate": 0,
"loss": 1.3382,
"step": 3
},
{
"epoch": 2.0,
"grad_norm": 6.078482627868652,
"learning_rate": 0.0,
"loss": 1.2555,
"step": 4
},
{
"epoch": 2.5,
"grad_norm": 5.590641975402832,
"learning_rate": 1.5051499783199055e-07,
"loss": 1.2272,
"step": 5
},
{
"epoch": 3.0,
"grad_norm": 5.590641975402832,
"learning_rate": 1.5051499783199055e-07,
"loss": 1.3339,
"step": 6
},
{
"epoch": 3.5,
"grad_norm": 5.590641975402832,
"learning_rate": 1.5051499783199055e-07,
"loss": 1.4161,
"step": 7
},
{
"epoch": 4.0,
"grad_norm": 11.268411636352539,
"learning_rate": 2.385606273598312e-07,
"loss": 1.362,
"step": 8
},
{
"epoch": 4.5,
"grad_norm": 9.850939750671387,
"learning_rate": 3.010299956639811e-07,
"loss": 1.1579,
"step": 9
},
{
"epoch": 5.0,
"grad_norm": 11.71530532836914,
"learning_rate": 3.494850021680093e-07,
"loss": 1.4335,
"step": 10
},
{
"epoch": 5.5,
"grad_norm": 5.690868377685547,
"learning_rate": 3.8907562519182173e-07,
"loss": 1.2658,
"step": 11
},
{
"epoch": 6.0,
"grad_norm": 10.700072288513184,
"learning_rate": 4.2254902000712834e-07,
"loss": 1.3482,
"step": 12
},
{
"epoch": 6.5,
"grad_norm": 6.189509391784668,
"learning_rate": 4.5154499349597166e-07,
"loss": 1.3464,
"step": 13
},
{
"epoch": 7.0,
"grad_norm": 6.247044086456299,
"learning_rate": 4.771212547196623e-07,
"loss": 1.2738,
"step": 14
},
{
"epoch": 7.5,
"grad_norm": 6.324206829071045,
"learning_rate": 4.999999999999999e-07,
"loss": 1.2795,
"step": 15
},
{
"epoch": 8.0,
"grad_norm": 5.119565486907959,
"learning_rate": 5.206963425791124e-07,
"loss": 1.281,
"step": 16
},
{
"epoch": 8.5,
"grad_norm": 5.067975044250488,
"learning_rate": 5.395906230238123e-07,
"loss": 1.4589,
"step": 17
},
{
"epoch": 9.0,
"grad_norm": 4.973406791687012,
"learning_rate": 5.569716761534182e-07,
"loss": 1.293,
"step": 18
},
{
"epoch": 9.5,
"grad_norm": 4.727417945861816,
"learning_rate": 5.730640178391189e-07,
"loss": 1.2502,
"step": 19
},
{
"epoch": 10.0,
"grad_norm": 4.404863357543945,
"learning_rate": 5.880456295278405e-07,
"loss": 1.2186,
"step": 20
},
{
"epoch": 10.5,
"grad_norm": 6.084046840667725,
"learning_rate": 6.020599913279622e-07,
"loss": 1.4264,
"step": 21
},
{
"epoch": 11.0,
"grad_norm": 4.772565841674805,
"learning_rate": 6.15224460689137e-07,
"loss": 1.2514,
"step": 22
},
{
"epoch": 11.5,
"grad_norm": 4.340854167938232,
"learning_rate": 6.276362525516529e-07,
"loss": 1.2459,
"step": 23
},
{
"epoch": 12.0,
"grad_norm": 3.630235433578491,
"learning_rate": 6.393768004764143e-07,
"loss": 1.0926,
"step": 24
},
{
"epoch": 12.5,
"grad_norm": 7.564579963684082,
"learning_rate": 6.505149978319905e-07,
"loss": 1.3692,
"step": 25
},
{
"epoch": 13.0,
"grad_norm": 4.304481029510498,
"learning_rate": 6.611096473669595e-07,
"loss": 1.0156,
"step": 26
},
{
"epoch": 13.5,
"grad_norm": 5.493950366973877,
"learning_rate": 6.712113404111031e-07,
"loss": 1.2072,
"step": 27
},
{
"epoch": 14.0,
"grad_norm": 3.82694411277771,
"learning_rate": 6.808639180087963e-07,
"loss": 1.0592,
"step": 28
},
{
"epoch": 14.5,
"grad_norm": 3.7888131141662598,
"learning_rate": 6.901056208558029e-07,
"loss": 1.1043,
"step": 29
},
{
"epoch": 15.0,
"grad_norm": 3.5746841430664062,
"learning_rate": 6.989700043360186e-07,
"loss": 1.1532,
"step": 30
},
{
"epoch": 15.5,
"grad_norm": 4.591047286987305,
"learning_rate": 7.074866739854088e-07,
"loss": 1.2519,
"step": 31
},
{
"epoch": 16.0,
"grad_norm": 10.526527404785156,
"learning_rate": 7.156818820794935e-07,
"loss": 1.1504,
"step": 32
},
{
"epoch": 16.5,
"grad_norm": 4.354953289031982,
"learning_rate": 7.235790156711094e-07,
"loss": 1.098,
"step": 33
},
{
"epoch": 17.0,
"grad_norm": 5.967930316925049,
"learning_rate": 7.311989989494779e-07,
"loss": 1.1957,
"step": 34
},
{
"epoch": 17.5,
"grad_norm": 3.097174644470215,
"learning_rate": 7.38560627359831e-07,
"loss": 1.073,
"step": 35
},
{
"epoch": 18.0,
"grad_norm": 3.9502739906311035,
"learning_rate": 7.456808469171361e-07,
"loss": 1.0042,
"step": 36
},
{
"epoch": 18.5,
"grad_norm": 3.2742667198181152,
"learning_rate": 7.525749891599529e-07,
"loss": 1.004,
"step": 37
},
{
"epoch": 19.0,
"grad_norm": 4.2321953773498535,
"learning_rate": 7.592569699389436e-07,
"loss": 1.0532,
"step": 38
},
{
"epoch": 19.5,
"grad_norm": 4.322045803070068,
"learning_rate": 7.657394585211274e-07,
"loss": 1.0297,
"step": 39
},
{
"epoch": 20.0,
"grad_norm": 3.5067355632781982,
"learning_rate": 7.720340221751376e-07,
"loss": 1.0541,
"step": 40
},
{
"epoch": 20.5,
"grad_norm": 3.1070497035980225,
"learning_rate": 7.781512503836435e-07,
"loss": 0.8917,
"step": 41
},
{
"epoch": 21.0,
"grad_norm": 3.413217067718506,
"learning_rate": 7.841008620334974e-07,
"loss": 1.0688,
"step": 42
},
{
"epoch": 21.5,
"grad_norm": 3.413217067718506,
"learning_rate": 7.841008620334974e-07,
"loss": 0.8956,
"step": 43
},
{
"epoch": 22.0,
"grad_norm": 31.582998275756836,
"learning_rate": 7.89891798308405e-07,
"loss": 1.1435,
"step": 44
},
{
"epoch": 22.5,
"grad_norm": 3.2715117931365967,
"learning_rate": 7.955323035132494e-07,
"loss": 1.023,
"step": 45
},
{
"epoch": 23.0,
"grad_norm": 3.081068754196167,
"learning_rate": 8.01029995663981e-07,
"loss": 0.9536,
"step": 46
},
{
"epoch": 23.5,
"grad_norm": 5.033097267150879,
"learning_rate": 8.063919283598676e-07,
"loss": 1.0542,
"step": 47
},
{
"epoch": 24.0,
"grad_norm": 3.2547528743743896,
"learning_rate": 8.116246451989502e-07,
"loss": 0.8736,
"step": 48
},
{
"epoch": 24.5,
"grad_norm": 3.2148780822753906,
"learning_rate": 8.16734227789793e-07,
"loss": 0.8366,
"step": 49
},
{
"epoch": 25.0,
"grad_norm": 3.271756887435913,
"learning_rate": 8.217263382430935e-07,
"loss": 0.9887,
"step": 50
},
{
"epoch": 25.5,
"grad_norm": 3.1897552013397217,
"learning_rate": 8.266062568876716e-07,
"loss": 0.9634,
"step": 51
},
{
"epoch": 26.0,
"grad_norm": 2.999152421951294,
"learning_rate": 8.313789158407869e-07,
"loss": 0.8479,
"step": 52
},
{
"epoch": 26.5,
"grad_norm": 3.126117467880249,
"learning_rate": 8.360489289678585e-07,
"loss": 0.9963,
"step": 53
},
{
"epoch": 27.0,
"grad_norm": 2.808807611465454,
"learning_rate": 8.406206186877934e-07,
"loss": 0.7693,
"step": 54
},
{
"epoch": 27.5,
"grad_norm": 3.8470795154571533,
"learning_rate": 8.450980400142567e-07,
"loss": 0.8929,
"step": 55
},
{
"epoch": 28.0,
"grad_norm": 4.059876918792725,
"learning_rate": 8.494850021680092e-07,
"loss": 0.9874,
"step": 56
},
{
"epoch": 28.5,
"grad_norm": 3.1297614574432373,
"learning_rate": 8.53785088048968e-07,
"loss": 0.8148,
"step": 57
},
{
"epoch": 29.0,
"grad_norm": 4.2221150398254395,
"learning_rate": 8.580016718173995e-07,
"loss": 0.9519,
"step": 58
},
{
"epoch": 29.5,
"grad_norm": 3.0946404933929443,
"learning_rate": 8.621379348003944e-07,
"loss": 0.8067,
"step": 59
},
{
"epoch": 30.0,
"grad_norm": 3.16498064994812,
"learning_rate": 8.661968799114842e-07,
"loss": 0.759,
"step": 60
},
{
"epoch": 30.5,
"grad_norm": 3.2981464862823486,
"learning_rate": 8.701813447471218e-07,
"loss": 0.8196,
"step": 61
},
{
"epoch": 31.0,
"grad_norm": 2.9492411613464355,
"learning_rate": 8.740940135031001e-07,
"loss": 0.8733,
"step": 62
},
{
"epoch": 31.5,
"grad_norm": 4.684609413146973,
"learning_rate": 8.779374278362456e-07,
"loss": 0.9314,
"step": 63
},
{
"epoch": 32.0,
"grad_norm": 2.6447761058807373,
"learning_rate": 8.817139967814684e-07,
"loss": 0.7206,
"step": 64
},
{
"epoch": 32.5,
"grad_norm": 3.544161319732666,
"learning_rate": 8.854260058210719e-07,
"loss": 0.7301,
"step": 65
},
{
"epoch": 33.0,
"grad_norm": 2.7136151790618896,
"learning_rate": 8.890756251918216e-07,
"loss": 0.7486,
"step": 66
},
{
"epoch": 33.5,
"grad_norm": 3.6095616817474365,
"learning_rate": 8.926649175053833e-07,
"loss": 0.6688,
"step": 67
},
{
"epoch": 34.0,
"grad_norm": 2.742135763168335,
"learning_rate": 8.961958447491268e-07,
"loss": 0.7577,
"step": 68
},
{
"epoch": 34.5,
"grad_norm": 3.166233777999878,
"learning_rate": 8.996702747267907e-07,
"loss": 0.6201,
"step": 69
},
{
"epoch": 35.0,
"grad_norm": 6.629209518432617,
"learning_rate": 9.030899869919433e-07,
"loss": 0.7546,
"step": 70
},
{
"epoch": 35.5,
"grad_norm": 3.2534568309783936,
"learning_rate": 9.064566783214276e-07,
"loss": 0.6966,
"step": 71
},
{
"epoch": 36.0,
"grad_norm": 2.9836554527282715,
"learning_rate": 9.097719677709341e-07,
"loss": 0.669,
"step": 72
},
{
"epoch": 36.5,
"grad_norm": 2.8371522426605225,
"learning_rate": 9.13037401350413e-07,
"loss": 0.602,
"step": 73
},
{
"epoch": 37.0,
"grad_norm": 3.1005606651306152,
"learning_rate": 9.162544563531181e-07,
"loss": 0.7351,
"step": 74
},
{
"epoch": 37.5,
"grad_norm": 2.771270751953125,
"learning_rate": 9.194245453686276e-07,
"loss": 0.5892,
"step": 75
},
{
"epoch": 38.0,
"grad_norm": 4.207065105438232,
"learning_rate": 9.225490200071283e-07,
"loss": 0.7159,
"step": 76
},
{
"epoch": 38.5,
"grad_norm": 4.0643439292907715,
"learning_rate": 9.256291743595375e-07,
"loss": 0.6424,
"step": 77
},
{
"epoch": 39.0,
"grad_norm": 3.463486433029175,
"learning_rate": 9.28666248215634e-07,
"loss": 0.6164,
"step": 78
},
{
"epoch": 39.5,
"grad_norm": 3.14841890335083,
"learning_rate": 9.316614300602277e-07,
"loss": 0.5615,
"step": 79
},
{
"epoch": 40.0,
"grad_norm": 2.958975076675415,
"learning_rate": 9.346158598654879e-07,
"loss": 0.5521,
"step": 80
},
{
"epoch": 40.5,
"grad_norm": 2.7486228942871094,
"learning_rate": 9.375306316958498e-07,
"loss": 0.5937,
"step": 81
},
{
"epoch": 41.0,
"grad_norm": 2.736597776412964,
"learning_rate": 9.404067961403955e-07,
"loss": 0.5238,
"step": 82
},
{
"epoch": 41.5,
"grad_norm": 2.9119350910186768,
"learning_rate": 9.432453625862408e-07,
"loss": 0.5296,
"step": 83
},
{
"epoch": 42.0,
"grad_norm": 3.870023727416992,
"learning_rate": 9.4604730134524e-07,
"loss": 0.5716,
"step": 84
},
{
"epoch": 42.5,
"grad_norm": 3.2765142917633057,
"learning_rate": 9.488135456452205e-07,
"loss": 0.597,
"step": 85
},
{
"epoch": 43.0,
"grad_norm": 3.575230598449707,
"learning_rate": 9.515449934959715e-07,
"loss": 0.5601,
"step": 86
},
{
"epoch": 43.5,
"grad_norm": 3.2691493034362793,
"learning_rate": 9.542425094393247e-07,
"loss": 0.5662,
"step": 87
},
{
"epoch": 44.0,
"grad_norm": 2.8934061527252197,
"learning_rate": 9.569069261918583e-07,
"loss": 0.5026,
"step": 88
},
{
"epoch": 44.5,
"grad_norm": 3.517810821533203,
"learning_rate": 9.59539046188037e-07,
"loss": 0.5107,
"step": 89
},
{
"epoch": 45.0,
"grad_norm": 3.1376760005950928,
"learning_rate": 9.621396430309406e-07,
"loss": 0.5166,
"step": 90
},
{
"epoch": 45.5,
"grad_norm": 3.4940109252929688,
"learning_rate": 9.647094628571462e-07,
"loss": 0.4549,
"step": 91
},
{
"epoch": 46.0,
"grad_norm": 6.771224498748779,
"learning_rate": 9.672492256217836e-07,
"loss": 0.4991,
"step": 92
},
{
"epoch": 46.5,
"grad_norm": 4.691510200500488,
"learning_rate": 9.69759626309309e-07,
"loss": 0.4521,
"step": 93
},
{
"epoch": 47.0,
"grad_norm": 3.1397156715393066,
"learning_rate": 9.722413360750842e-07,
"loss": 0.4287,
"step": 94
},
{
"epoch": 47.5,
"grad_norm": 3.4504847526550293,
"learning_rate": 9.74695003322456e-07,
"loss": 0.3875,
"step": 95
},
{
"epoch": 48.0,
"grad_norm": 2.7338361740112305,
"learning_rate": 9.771212547196622e-07,
"loss": 0.44,
"step": 96
},
{
"epoch": 48.5,
"grad_norm": 4.007640838623047,
"learning_rate": 9.795206961605466e-07,
"loss": 0.3561,
"step": 97
},
{
"epoch": 49.0,
"grad_norm": 3.637798309326172,
"learning_rate": 9.818939136727774e-07,
"loss": 0.4562,
"step": 98
},
{
"epoch": 49.5,
"grad_norm": 4.988349914550781,
"learning_rate": 9.842414742769674e-07,
"loss": 0.4038,
"step": 99
},
{
"epoch": 50.0,
"grad_norm": 3.512204885482788,
"learning_rate": 9.865639267998492e-07,
"loss": 0.3882,
"step": 100
},
{
"epoch": 50.5,
"grad_norm": 2.9533920288085938,
"learning_rate": 9.888618026444236e-07,
"loss": 0.3843,
"step": 101
},
{
"epoch": 51.0,
"grad_norm": 2.7523856163024902,
"learning_rate": 9.91135616519784e-07,
"loss": 0.3811,
"step": 102
},
{
"epoch": 51.5,
"grad_norm": 6.668997764587402,
"learning_rate": 9.933858671331222e-07,
"loss": 0.4223,
"step": 103
},
{
"epoch": 52.0,
"grad_norm": 3.053255319595337,
"learning_rate": 9.956130378462473e-07,
"loss": 0.3066,
"step": 104
},
{
"epoch": 52.5,
"grad_norm": 3.2109737396240234,
"learning_rate": 9.978175972987748e-07,
"loss": 0.3446,
"step": 105
},
{
"epoch": 53.0,
"grad_norm": 2.961881399154663,
"learning_rate": 9.999999999999997e-07,
"loss": 0.2935,
"step": 106
},
{
"epoch": 53.5,
"grad_norm": 3.6308419704437256,
"learning_rate": 1e-06,
"loss": 0.3619,
"step": 107
},
{
"epoch": 54.0,
"grad_norm": 3.809734582901001,
"learning_rate": 1e-06,
"loss": 0.3063,
"step": 108
},
{
"epoch": 54.5,
"grad_norm": 2.6650562286376953,
"learning_rate": 1e-06,
"loss": 0.3504,
"step": 109
},
{
"epoch": 55.0,
"grad_norm": 3.361117362976074,
"learning_rate": 1e-06,
"loss": 0.3084,
"step": 110
},
{
"epoch": 55.5,
"grad_norm": 2.6281044483184814,
"learning_rate": 1e-06,
"loss": 0.3191,
"step": 111
},
{
"epoch": 56.0,
"grad_norm": 3.0685670375823975,
"learning_rate": 1e-06,
"loss": 0.2472,
"step": 112
},
{
"epoch": 56.5,
"grad_norm": 3.149151563644409,
"learning_rate": 1e-06,
"loss": 0.255,
"step": 113
},
{
"epoch": 57.0,
"grad_norm": 3.0701425075531006,
"learning_rate": 1e-06,
"loss": 0.2392,
"step": 114
},
{
"epoch": 57.5,
"grad_norm": 2.449301242828369,
"learning_rate": 1e-06,
"loss": 0.2239,
"step": 115
},
{
"epoch": 58.0,
"grad_norm": 2.5579042434692383,
"learning_rate": 1e-06,
"loss": 0.265,
"step": 116
},
{
"epoch": 58.5,
"grad_norm": 3.4325478076934814,
"learning_rate": 1e-06,
"loss": 0.2522,
"step": 117
},
{
"epoch": 59.0,
"grad_norm": 3.4678075313568115,
"learning_rate": 1e-06,
"loss": 0.1957,
"step": 118
},
{
"epoch": 59.5,
"grad_norm": 3.3002102375030518,
"learning_rate": 1e-06,
"loss": 0.2152,
"step": 119
},
{
"epoch": 60.0,
"grad_norm": 2.939915180206299,
"learning_rate": 1e-06,
"loss": 0.228,
"step": 120
},
{
"epoch": 60.5,
"grad_norm": 2.763664484024048,
"learning_rate": 1e-06,
"loss": 0.2261,
"step": 121
},
{
"epoch": 61.0,
"grad_norm": 2.337585210800171,
"learning_rate": 1e-06,
"loss": 0.1851,
"step": 122
},
{
"epoch": 61.5,
"grad_norm": 2.545644760131836,
"learning_rate": 1e-06,
"loss": 0.2182,
"step": 123
},
{
"epoch": 62.0,
"grad_norm": 2.564753770828247,
"learning_rate": 1e-06,
"loss": 0.1962,
"step": 124
},
{
"epoch": 62.5,
"grad_norm": 2.338416814804077,
"learning_rate": 1e-06,
"loss": 0.2051,
"step": 125
},
{
"epoch": 63.0,
"grad_norm": 2.154878854751587,
"learning_rate": 1e-06,
"loss": 0.1879,
"step": 126
},
{
"epoch": 63.5,
"grad_norm": 3.405646800994873,
"learning_rate": 1e-06,
"loss": 0.1655,
"step": 127
},
{
"epoch": 64.0,
"grad_norm": 1.8845716714859009,
"learning_rate": 1e-06,
"loss": 0.161,
"step": 128
},
{
"epoch": 64.5,
"grad_norm": 1.707482099533081,
"learning_rate": 1e-06,
"loss": 0.1342,
"step": 129
},
{
"epoch": 65.0,
"grad_norm": 3.2093379497528076,
"learning_rate": 1e-06,
"loss": 0.1416,
"step": 130
},
{
"epoch": 65.5,
"grad_norm": 3.438514232635498,
"learning_rate": 1e-06,
"loss": 0.1486,
"step": 131
},
{
"epoch": 66.0,
"grad_norm": 1.9615154266357422,
"learning_rate": 1e-06,
"loss": 0.1461,
"step": 132
},
{
"epoch": 66.5,
"grad_norm": 2.3800694942474365,
"learning_rate": 1e-06,
"loss": 0.1133,
"step": 133
},
{
"epoch": 67.0,
"grad_norm": 2.241901159286499,
"learning_rate": 1e-06,
"loss": 0.1317,
"step": 134
},
{
"epoch": 67.5,
"grad_norm": 2.024240255355835,
"learning_rate": 1e-06,
"loss": 0.1195,
"step": 135
},
{
"epoch": 68.0,
"grad_norm": 2.2146880626678467,
"learning_rate": 1e-06,
"loss": 0.1413,
"step": 136
},
{
"epoch": 68.5,
"grad_norm": 2.4345452785491943,
"learning_rate": 1e-06,
"loss": 0.1313,
"step": 137
},
{
"epoch": 69.0,
"grad_norm": 3.0751936435699463,
"learning_rate": 1e-06,
"loss": 0.1231,
"step": 138
},
{
"epoch": 69.5,
"grad_norm": 2.2487804889678955,
"learning_rate": 1e-06,
"loss": 0.1131,
"step": 139
},
{
"epoch": 70.0,
"grad_norm": 3.5457677841186523,
"learning_rate": 1e-06,
"loss": 0.0812,
"step": 140
},
{
"epoch": 70.5,
"grad_norm": 1.4189667701721191,
"learning_rate": 1e-06,
"loss": 0.0574,
"step": 141
},
{
"epoch": 71.0,
"grad_norm": 2.4439244270324707,
"learning_rate": 1e-06,
"loss": 0.1146,
"step": 142
},
{
"epoch": 71.5,
"grad_norm": 1.7094790935516357,
"learning_rate": 1e-06,
"loss": 0.0937,
"step": 143
},
{
"epoch": 72.0,
"grad_norm": 1.4799623489379883,
"learning_rate": 1e-06,
"loss": 0.085,
"step": 144
},
{
"epoch": 72.5,
"grad_norm": 1.4534505605697632,
"learning_rate": 1e-06,
"loss": 0.074,
"step": 145
},
{
"epoch": 73.0,
"grad_norm": 1.7459020614624023,
"learning_rate": 1e-06,
"loss": 0.1062,
"step": 146
},
{
"epoch": 73.5,
"grad_norm": 1.3827319145202637,
"learning_rate": 1e-06,
"loss": 0.0597,
"step": 147
},
{
"epoch": 74.0,
"grad_norm": 1.4655884504318237,
"learning_rate": 1e-06,
"loss": 0.0749,
"step": 148
},
{
"epoch": 74.5,
"grad_norm": 1.6126585006713867,
"learning_rate": 1e-06,
"loss": 0.0812,
"step": 149
},
{
"epoch": 75.0,
"grad_norm": 1.4007526636123657,
"learning_rate": 1e-06,
"loss": 0.0724,
"step": 150
}
],
"logging_steps": 1.0,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 5000,
"save_steps": 50,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.098224817063854e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
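
The `log_history` above is a flat list of per-step records (epoch, step, loss, grad_norm, learning_rate), so it can be inspected with nothing more than the standard `json` module. Below is a minimal sketch, assuming the file has been downloaded locally as `trainer_state.json` and that matplotlib is installed; the path and plot styling are illustrative, not part of the original upload.

```python
# Minimal sketch: load this trainer_state.json and plot the logged training loss.
# Assumes the file sits in the current working directory; the path is illustrative.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries epoch, step, loss, grad_norm and learning_rate.
steps = [entry["step"] for entry in state["log_history"]]
losses = [entry["loss"] for entry in state["log_history"]]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(f"ptsft150 ({state['global_step']} of {state['max_steps']} steps)")
plt.show()
```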