minicpmv_overfit_lora / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 125.0,
"eval_steps": 1000,
"global_step": 250,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.5,
"grad_norm": 3.334595203399658,
"learning_rate": 0.0,
"loss": 1.3402,
"step": 1
},
{
"epoch": 1.0,
"grad_norm": 3.309346914291382,
"learning_rate": 1.5051499783199055e-07,
"loss": 1.284,
"step": 2
},
{
"epoch": 1.5,
"grad_norm": 3.4695301055908203,
"learning_rate": 2.385606273598312e-07,
"loss": 1.3362,
"step": 3
},
{
"epoch": 2.0,
"grad_norm": 2.9988322257995605,
"learning_rate": 3.010299956639811e-07,
"loss": 1.2535,
"step": 4
},
{
"epoch": 2.5,
"grad_norm": 3.0436394214630127,
"learning_rate": 3.494850021680093e-07,
"loss": 1.2326,
"step": 5
},
{
"epoch": 3.0,
"grad_norm": 3.329270601272583,
"learning_rate": 3.8907562519182173e-07,
"loss": 1.3378,
"step": 6
},
{
"epoch": 3.5,
"grad_norm": 3.0574333667755127,
"learning_rate": 4.2254902000712834e-07,
"loss": 1.4162,
"step": 7
},
{
"epoch": 4.0,
"grad_norm": 3.348349094390869,
"learning_rate": 4.5154499349597166e-07,
"loss": 1.3598,
"step": 8
},
{
"epoch": 4.5,
"grad_norm": 2.9368350505828857,
"learning_rate": 4.771212547196623e-07,
"loss": 1.1551,
"step": 9
},
{
"epoch": 5.0,
"grad_norm": 3.3986520767211914,
"learning_rate": 4.999999999999999e-07,
"loss": 1.4285,
"step": 10
},
{
"epoch": 5.5,
"grad_norm": 3.004727363586426,
"learning_rate": 5.206963425791124e-07,
"loss": 1.2629,
"step": 11
},
{
"epoch": 6.0,
"grad_norm": 3.090939998626709,
"learning_rate": 5.395906230238123e-07,
"loss": 1.3378,
"step": 12
},
{
"epoch": 6.5,
"grad_norm": 2.906198740005493,
"learning_rate": 5.569716761534182e-07,
"loss": 1.3384,
"step": 13
},
{
"epoch": 7.0,
"grad_norm": 2.9036598205566406,
"learning_rate": 5.730640178391189e-07,
"loss": 1.2693,
"step": 14
},
{
"epoch": 7.5,
"grad_norm": 2.924349069595337,
"learning_rate": 5.880456295278405e-07,
"loss": 1.2711,
"step": 15
},
{
"epoch": 8.0,
"grad_norm": 3.110586166381836,
"learning_rate": 6.020599913279622e-07,
"loss": 1.2708,
"step": 16
},
{
"epoch": 8.5,
"grad_norm": 3.2055583000183105,
"learning_rate": 6.15224460689137e-07,
"loss": 1.4757,
"step": 17
},
{
"epoch": 9.0,
"grad_norm": 2.972228527069092,
"learning_rate": 6.276362525516529e-07,
"loss": 1.3115,
"step": 18
},
{
"epoch": 9.5,
"grad_norm": 2.447122573852539,
"learning_rate": 6.393768004764143e-07,
"loss": 1.241,
"step": 19
},
{
"epoch": 10.0,
"grad_norm": 2.3147361278533936,
"learning_rate": 6.505149978319905e-07,
"loss": 1.2135,
"step": 20
},
{
"epoch": 10.5,
"grad_norm": 2.9482200145721436,
"learning_rate": 6.611096473669595e-07,
"loss": 1.4155,
"step": 21
},
{
"epoch": 11.0,
"grad_norm": 2.6261355876922607,
"learning_rate": 6.712113404111031e-07,
"loss": 1.2434,
"step": 22
},
{
"epoch": 11.5,
"grad_norm": 2.448061943054199,
"learning_rate": 6.808639180087963e-07,
"loss": 1.2424,
"step": 23
},
{
"epoch": 12.0,
"grad_norm": 2.22385311126709,
"learning_rate": 6.901056208558029e-07,
"loss": 1.1388,
"step": 24
},
{
"epoch": 12.5,
"grad_norm": 2.7259082794189453,
"learning_rate": 6.989700043360186e-07,
"loss": 1.4238,
"step": 25
},
{
"epoch": 13.0,
"grad_norm": 2.03949236869812,
"learning_rate": 7.074866739854088e-07,
"loss": 1.0655,
"step": 26
},
{
"epoch": 13.5,
"grad_norm": 2.341221809387207,
"learning_rate": 7.156818820794935e-07,
"loss": 1.2652,
"step": 27
},
{
"epoch": 14.0,
"grad_norm": 2.2585113048553467,
"learning_rate": 7.235790156711094e-07,
"loss": 1.1248,
"step": 28
},
{
"epoch": 14.5,
"grad_norm": 2.2248682975769043,
"learning_rate": 7.311989989494779e-07,
"loss": 1.1755,
"step": 29
},
{
"epoch": 15.0,
"grad_norm": 2.115250825881958,
"learning_rate": 7.38560627359831e-07,
"loss": 1.1932,
"step": 30
},
{
"epoch": 15.5,
"grad_norm": 2.030606269836426,
"learning_rate": 7.456808469171361e-07,
"loss": 1.2845,
"step": 31
},
{
"epoch": 16.0,
"grad_norm": 2.0579230785369873,
"learning_rate": 7.525749891599529e-07,
"loss": 1.1964,
"step": 32
},
{
"epoch": 16.5,
"grad_norm": 2.074784517288208,
"learning_rate": 7.592569699389436e-07,
"loss": 1.1445,
"step": 33
},
{
"epoch": 17.0,
"grad_norm": 2.064431667327881,
"learning_rate": 7.657394585211274e-07,
"loss": 1.2398,
"step": 34
},
{
"epoch": 17.5,
"grad_norm": 1.8380582332611084,
"learning_rate": 7.720340221751376e-07,
"loss": 1.1373,
"step": 35
},
{
"epoch": 18.0,
"grad_norm": 2.3023219108581543,
"learning_rate": 7.781512503836435e-07,
"loss": 1.0978,
"step": 36
},
{
"epoch": 18.5,
"grad_norm": 2.0860466957092285,
"learning_rate": 7.841008620334974e-07,
"loss": 1.1017,
"step": 37
},
{
"epoch": 19.0,
"grad_norm": 1.9423589706420898,
"learning_rate": 7.89891798308405e-07,
"loss": 1.1538,
"step": 38
},
{
"epoch": 19.5,
"grad_norm": 2.018376350402832,
"learning_rate": 7.955323035132494e-07,
"loss": 1.1345,
"step": 39
},
{
"epoch": 20.0,
"grad_norm": 1.9834052324295044,
"learning_rate": 8.01029995663981e-07,
"loss": 1.1596,
"step": 40
},
{
"epoch": 20.5,
"grad_norm": 1.8433727025985718,
"learning_rate": 8.063919283598676e-07,
"loss": 0.9934,
"step": 41
},
{
"epoch": 21.0,
"grad_norm": 2.0120983123779297,
"learning_rate": 8.116246451989502e-07,
"loss": 1.1849,
"step": 42
},
{
"epoch": 21.5,
"grad_norm": 1.7347400188446045,
"learning_rate": 8.16734227789793e-07,
"loss": 0.9969,
"step": 43
},
{
"epoch": 22.0,
"grad_norm": 2.1413559913635254,
"learning_rate": 8.217263382430935e-07,
"loss": 1.2613,
"step": 44
},
{
"epoch": 22.5,
"grad_norm": 1.9589574337005615,
"learning_rate": 8.266062568876716e-07,
"loss": 1.134,
"step": 45
},
{
"epoch": 23.0,
"grad_norm": 1.7819244861602783,
"learning_rate": 8.313789158407869e-07,
"loss": 1.0627,
"step": 46
},
{
"epoch": 23.5,
"grad_norm": 2.12137508392334,
"learning_rate": 8.360489289678585e-07,
"loss": 1.1799,
"step": 47
},
{
"epoch": 24.0,
"grad_norm": 1.7399541139602661,
"learning_rate": 8.406206186877934e-07,
"loss": 0.9974,
"step": 48
},
{
"epoch": 24.5,
"grad_norm": 2.0634093284606934,
"learning_rate": 8.450980400142567e-07,
"loss": 0.9758,
"step": 49
},
{
"epoch": 25.0,
"grad_norm": 1.8642668724060059,
"learning_rate": 8.494850021680092e-07,
"loss": 1.1103,
"step": 50
},
{
"epoch": 25.5,
"grad_norm": 1.8973793983459473,
"learning_rate": 8.53785088048968e-07,
"loss": 1.1013,
"step": 51
},
{
"epoch": 26.0,
"grad_norm": 1.8340080976486206,
"learning_rate": 8.580016718173995e-07,
"loss": 0.9959,
"step": 52
},
{
"epoch": 26.5,
"grad_norm": 1.8867207765579224,
"learning_rate": 8.621379348003944e-07,
"loss": 1.1526,
"step": 53
},
{
"epoch": 27.0,
"grad_norm": 1.7188408374786377,
"learning_rate": 8.661968799114842e-07,
"loss": 0.9184,
"step": 54
},
{
"epoch": 27.5,
"grad_norm": 1.8246105909347534,
"learning_rate": 8.701813447471218e-07,
"loss": 1.0568,
"step": 55
},
{
"epoch": 28.0,
"grad_norm": 1.9179924726486206,
"learning_rate": 8.740940135031001e-07,
"loss": 1.1564,
"step": 56
},
{
"epoch": 28.5,
"grad_norm": 1.7202187776565552,
"learning_rate": 8.779374278362456e-07,
"loss": 0.9801,
"step": 57
},
{
"epoch": 29.0,
"grad_norm": 1.891327977180481,
"learning_rate": 8.817139967814684e-07,
"loss": 1.1344,
"step": 58
},
{
"epoch": 29.5,
"grad_norm": 1.6394891738891602,
"learning_rate": 8.854260058210719e-07,
"loss": 0.9748,
"step": 59
},
{
"epoch": 30.0,
"grad_norm": 1.8808232545852661,
"learning_rate": 8.890756251918216e-07,
"loss": 0.9423,
"step": 60
},
{
"epoch": 30.5,
"grad_norm": 1.8192319869995117,
"learning_rate": 8.926649175053833e-07,
"loss": 1.003,
"step": 61
},
{
"epoch": 31.0,
"grad_norm": 1.7621928453445435,
"learning_rate": 8.961958447491268e-07,
"loss": 1.0498,
"step": 62
},
{
"epoch": 31.5,
"grad_norm": 1.8957831859588623,
"learning_rate": 8.996702747267907e-07,
"loss": 1.1259,
"step": 63
},
{
"epoch": 32.0,
"grad_norm": 1.6946312189102173,
"learning_rate": 9.030899869919433e-07,
"loss": 0.9092,
"step": 64
},
{
"epoch": 32.5,
"grad_norm": 1.6719154119491577,
"learning_rate": 9.064566783214276e-07,
"loss": 0.9273,
"step": 65
},
{
"epoch": 33.0,
"grad_norm": 1.6526826620101929,
"learning_rate": 9.097719677709341e-07,
"loss": 0.9469,
"step": 66
},
{
"epoch": 33.5,
"grad_norm": 1.7876310348510742,
"learning_rate": 9.13037401350413e-07,
"loss": 0.8734,
"step": 67
},
{
"epoch": 34.0,
"grad_norm": 1.517971396446228,
"learning_rate": 9.162544563531181e-07,
"loss": 0.9527,
"step": 68
},
{
"epoch": 34.5,
"grad_norm": 1.7323296070098877,
"learning_rate": 9.194245453686276e-07,
"loss": 0.8279,
"step": 69
},
{
"epoch": 35.0,
"grad_norm": 1.5928230285644531,
"learning_rate": 9.225490200071283e-07,
"loss": 0.9627,
"step": 70
},
{
"epoch": 35.5,
"grad_norm": 1.6561415195465088,
"learning_rate": 9.256291743595375e-07,
"loss": 0.9138,
"step": 71
},
{
"epoch": 36.0,
"grad_norm": 1.628674030303955,
"learning_rate": 9.28666248215634e-07,
"loss": 0.8956,
"step": 72
},
{
"epoch": 36.5,
"grad_norm": 1.5087110996246338,
"learning_rate": 9.316614300602277e-07,
"loss": 0.8223,
"step": 73
},
{
"epoch": 37.0,
"grad_norm": 1.6503626108169556,
"learning_rate": 9.346158598654879e-07,
"loss": 0.9776,
"step": 74
},
{
"epoch": 37.5,
"grad_norm": 1.6680738925933838,
"learning_rate": 9.375306316958498e-07,
"loss": 0.8301,
"step": 75
},
{
"epoch": 38.0,
"grad_norm": 1.8239458799362183,
"learning_rate": 9.404067961403955e-07,
"loss": 0.9891,
"step": 76
},
{
"epoch": 38.5,
"grad_norm": 1.669043779373169,
"learning_rate": 9.432453625862408e-07,
"loss": 0.9085,
"step": 77
},
{
"epoch": 39.0,
"grad_norm": 1.8124521970748901,
"learning_rate": 9.4604730134524e-07,
"loss": 0.87,
"step": 78
},
{
"epoch": 39.5,
"grad_norm": 1.6593950986862183,
"learning_rate": 9.488135456452205e-07,
"loss": 0.8142,
"step": 79
},
{
"epoch": 40.0,
"grad_norm": 1.6837782859802246,
"learning_rate": 9.515449934959715e-07,
"loss": 0.8246,
"step": 80
},
{
"epoch": 40.5,
"grad_norm": 1.7322601079940796,
"learning_rate": 9.542425094393247e-07,
"loss": 0.8752,
"step": 81
},
{
"epoch": 41.0,
"grad_norm": 1.5245649814605713,
"learning_rate": 9.569069261918583e-07,
"loss": 0.7944,
"step": 82
},
{
"epoch": 41.5,
"grad_norm": 1.713905692100525,
"learning_rate": 9.59539046188037e-07,
"loss": 0.8122,
"step": 83
},
{
"epoch": 42.0,
"grad_norm": 1.7115576267242432,
"learning_rate": 9.621396430309406e-07,
"loss": 0.8612,
"step": 84
},
{
"epoch": 42.5,
"grad_norm": 1.770555019378662,
"learning_rate": 9.647094628571462e-07,
"loss": 0.9131,
"step": 85
},
{
"epoch": 43.0,
"grad_norm": 1.8751977682113647,
"learning_rate": 9.672492256217836e-07,
"loss": 0.8756,
"step": 86
},
{
"epoch": 43.5,
"grad_norm": 1.8345999717712402,
"learning_rate": 9.69759626309309e-07,
"loss": 0.9098,
"step": 87
},
{
"epoch": 44.0,
"grad_norm": 1.6411185264587402,
"learning_rate": 9.722413360750842e-07,
"loss": 0.7975,
"step": 88
},
{
"epoch": 44.5,
"grad_norm": 1.744828462600708,
"learning_rate": 9.74695003322456e-07,
"loss": 0.8444,
"step": 89
},
{
"epoch": 45.0,
"grad_norm": 1.6588188409805298,
"learning_rate": 9.771212547196622e-07,
"loss": 0.8376,
"step": 90
},
{
"epoch": 45.5,
"grad_norm": 1.8046928644180298,
"learning_rate": 9.795206961605466e-07,
"loss": 0.7546,
"step": 91
},
{
"epoch": 46.0,
"grad_norm": 1.8828351497650146,
"learning_rate": 9.818939136727774e-07,
"loss": 0.8505,
"step": 92
},
{
"epoch": 46.5,
"grad_norm": 1.841956377029419,
"learning_rate": 9.842414742769674e-07,
"loss": 0.7847,
"step": 93
},
{
"epoch": 47.0,
"grad_norm": 1.682256817817688,
"learning_rate": 9.865639267998492e-07,
"loss": 0.7712,
"step": 94
},
{
"epoch": 47.5,
"grad_norm": 1.8375487327575684,
"learning_rate": 9.888618026444236e-07,
"loss": 0.7228,
"step": 95
},
{
"epoch": 48.0,
"grad_norm": 1.7198117971420288,
"learning_rate": 9.91135616519784e-07,
"loss": 0.7755,
"step": 96
},
{
"epoch": 48.5,
"grad_norm": 1.760452389717102,
"learning_rate": 9.933858671331222e-07,
"loss": 0.7045,
"step": 97
},
{
"epoch": 49.0,
"grad_norm": 1.735704779624939,
"learning_rate": 9.956130378462473e-07,
"loss": 0.8024,
"step": 98
},
{
"epoch": 49.5,
"grad_norm": 1.6422948837280273,
"learning_rate": 9.978175972987748e-07,
"loss": 0.7368,
"step": 99
},
{
"epoch": 50.0,
"grad_norm": 1.8960306644439697,
"learning_rate": 9.999999999999997e-07,
"loss": 0.7557,
"step": 100
},
{
"epoch": 50.5,
"grad_norm": 1.6727304458618164,
"learning_rate": 1e-06,
"loss": 0.7325,
"step": 101
},
{
"epoch": 51.0,
"grad_norm": 1.6515084505081177,
"learning_rate": 1e-06,
"loss": 0.7351,
"step": 102
},
{
"epoch": 51.5,
"grad_norm": 1.7705847024917603,
"learning_rate": 1e-06,
"loss": 0.7907,
"step": 103
},
{
"epoch": 52.0,
"grad_norm": 1.7057950496673584,
"learning_rate": 1e-06,
"loss": 0.6447,
"step": 104
},
{
"epoch": 52.5,
"grad_norm": 1.6130571365356445,
"learning_rate": 1e-06,
"loss": 0.7079,
"step": 105
},
{
"epoch": 53.0,
"grad_norm": 2.063298463821411,
"learning_rate": 1e-06,
"loss": 0.693,
"step": 106
},
{
"epoch": 53.5,
"grad_norm": 2.0730509757995605,
"learning_rate": 1e-06,
"loss": 0.8002,
"step": 107
},
{
"epoch": 54.0,
"grad_norm": 1.6381713151931763,
"learning_rate": 1e-06,
"loss": 0.657,
"step": 108
},
{
"epoch": 54.5,
"grad_norm": 1.5659828186035156,
"learning_rate": 1e-06,
"loss": 0.7202,
"step": 109
},
{
"epoch": 55.0,
"grad_norm": 1.594575047492981,
"learning_rate": 1e-06,
"loss": 0.6627,
"step": 110
},
{
"epoch": 55.5,
"grad_norm": 1.497917652130127,
"learning_rate": 1e-06,
"loss": 0.6693,
"step": 111
},
{
"epoch": 56.0,
"grad_norm": 2.5011086463928223,
"learning_rate": 1e-06,
"loss": 0.6946,
"step": 112
},
{
"epoch": 56.5,
"grad_norm": 1.9602758884429932,
"learning_rate": 1e-06,
"loss": 0.6926,
"step": 113
},
{
"epoch": 57.0,
"grad_norm": 1.4793510437011719,
"learning_rate": 1e-06,
"loss": 0.5861,
"step": 114
},
{
"epoch": 57.5,
"grad_norm": 1.6028777360916138,
"learning_rate": 1e-06,
"loss": 0.5788,
"step": 115
},
{
"epoch": 58.0,
"grad_norm": 1.6478813886642456,
"learning_rate": 1e-06,
"loss": 0.6395,
"step": 116
},
{
"epoch": 58.5,
"grad_norm": 1.5423738956451416,
"learning_rate": 1e-06,
"loss": 0.5959,
"step": 117
},
{
"epoch": 59.0,
"grad_norm": 1.8497169017791748,
"learning_rate": 1e-06,
"loss": 0.584,
"step": 118
},
{
"epoch": 59.5,
"grad_norm": 1.6440547704696655,
"learning_rate": 1e-06,
"loss": 0.5916,
"step": 119
},
{
"epoch": 60.0,
"grad_norm": 1.765620231628418,
"learning_rate": 1e-06,
"loss": 0.6226,
"step": 120
},
{
"epoch": 60.5,
"grad_norm": 1.543800950050354,
"learning_rate": 1e-06,
"loss": 0.5966,
"step": 121
},
{
"epoch": 61.0,
"grad_norm": 1.4944016933441162,
"learning_rate": 1e-06,
"loss": 0.5305,
"step": 122
},
{
"epoch": 61.5,
"grad_norm": 1.968621850013733,
"learning_rate": 1e-06,
"loss": 0.6673,
"step": 123
},
{
"epoch": 62.0,
"grad_norm": 1.523604393005371,
"learning_rate": 1e-06,
"loss": 0.5424,
"step": 124
},
{
"epoch": 62.5,
"grad_norm": 1.6466797590255737,
"learning_rate": 1e-06,
"loss": 0.6007,
"step": 125
},
{
"epoch": 63.0,
"grad_norm": 1.7836798429489136,
"learning_rate": 1e-06,
"loss": 0.6201,
"step": 126
},
{
"epoch": 63.5,
"grad_norm": 1.6673424243927002,
"learning_rate": 1e-06,
"loss": 0.5786,
"step": 127
},
{
"epoch": 64.0,
"grad_norm": 1.6889145374298096,
"learning_rate": 1e-06,
"loss": 0.5211,
"step": 128
},
{
"epoch": 64.5,
"grad_norm": 1.4834386110305786,
"learning_rate": 1e-06,
"loss": 0.4521,
"step": 129
},
{
"epoch": 65.0,
"grad_norm": 1.743851661682129,
"learning_rate": 1e-06,
"loss": 0.5363,
"step": 130
},
{
"epoch": 65.5,
"grad_norm": 1.8134723901748657,
"learning_rate": 1e-06,
"loss": 0.554,
"step": 131
},
{
"epoch": 66.0,
"grad_norm": 1.508358120918274,
"learning_rate": 1e-06,
"loss": 0.5104,
"step": 132
},
{
"epoch": 66.5,
"grad_norm": 1.6829733848571777,
"learning_rate": 1e-06,
"loss": 0.4658,
"step": 133
},
{
"epoch": 67.0,
"grad_norm": 1.526950716972351,
"learning_rate": 1e-06,
"loss": 0.4892,
"step": 134
},
{
"epoch": 67.5,
"grad_norm": 1.8935024738311768,
"learning_rate": 1e-06,
"loss": 0.4979,
"step": 135
},
{
"epoch": 68.0,
"grad_norm": 1.4638999700546265,
"learning_rate": 1e-06,
"loss": 0.5144,
"step": 136
},
{
"epoch": 68.5,
"grad_norm": 1.9910645484924316,
"learning_rate": 1e-06,
"loss": 0.5521,
"step": 137
},
{
"epoch": 69.0,
"grad_norm": 1.6257317066192627,
"learning_rate": 1e-06,
"loss": 0.4937,
"step": 138
},
{
"epoch": 69.5,
"grad_norm": 1.4498651027679443,
"learning_rate": 1e-06,
"loss": 0.4749,
"step": 139
},
{
"epoch": 70.0,
"grad_norm": 1.8104501962661743,
"learning_rate": 1e-06,
"loss": 0.4564,
"step": 140
},
{
"epoch": 70.5,
"grad_norm": 2.0244479179382324,
"learning_rate": 1e-06,
"loss": 0.4228,
"step": 141
},
{
"epoch": 71.0,
"grad_norm": 1.5190598964691162,
"learning_rate": 1e-06,
"loss": 0.4735,
"step": 142
},
{
"epoch": 71.5,
"grad_norm": 1.7180043458938599,
"learning_rate": 1e-06,
"loss": 0.4776,
"step": 143
},
{
"epoch": 72.0,
"grad_norm": 1.5680577754974365,
"learning_rate": 1e-06,
"loss": 0.4343,
"step": 144
},
{
"epoch": 72.5,
"grad_norm": 1.6798756122589111,
"learning_rate": 1e-06,
"loss": 0.4074,
"step": 145
},
{
"epoch": 73.0,
"grad_norm": 1.4644179344177246,
"learning_rate": 1e-06,
"loss": 0.4982,
"step": 146
},
{
"epoch": 73.5,
"grad_norm": 1.5461561679840088,
"learning_rate": 1e-06,
"loss": 0.3539,
"step": 147
},
{
"epoch": 74.0,
"grad_norm": 1.7116854190826416,
"learning_rate": 1e-06,
"loss": 0.4267,
"step": 148
},
{
"epoch": 74.5,
"grad_norm": 1.6357485055923462,
"learning_rate": 1e-06,
"loss": 0.4563,
"step": 149
},
{
"epoch": 75.0,
"grad_norm": 1.3843780755996704,
"learning_rate": 1e-06,
"loss": 0.4072,
"step": 150
},
{
"epoch": 75.5,
"grad_norm": 1.6510047912597656,
"learning_rate": 1e-06,
"loss": 0.4619,
"step": 151
},
{
"epoch": 76.0,
"grad_norm": 1.5008376836776733,
"learning_rate": 1e-06,
"loss": 0.3768,
"step": 152
},
{
"epoch": 76.5,
"grad_norm": 1.4433045387268066,
"learning_rate": 1e-06,
"loss": 0.4502,
"step": 153
},
{
"epoch": 77.0,
"grad_norm": 1.4826611280441284,
"learning_rate": 1e-06,
"loss": 0.3686,
"step": 154
},
{
"epoch": 77.5,
"grad_norm": 1.5890164375305176,
"learning_rate": 1e-06,
"loss": 0.3642,
"step": 155
},
{
"epoch": 78.0,
"grad_norm": 1.5281238555908203,
"learning_rate": 1e-06,
"loss": 0.4034,
"step": 156
},
{
"epoch": 78.5,
"grad_norm": 1.3185185194015503,
"learning_rate": 1e-06,
"loss": 0.336,
"step": 157
},
{
"epoch": 79.0,
"grad_norm": 1.6037932634353638,
"learning_rate": 1e-06,
"loss": 0.4349,
"step": 158
},
{
"epoch": 79.5,
"grad_norm": 1.384059190750122,
"learning_rate": 1e-06,
"loss": 0.3431,
"step": 159
},
{
"epoch": 80.0,
"grad_norm": 1.339905858039856,
"learning_rate": 1e-06,
"loss": 0.3745,
"step": 160
},
{
"epoch": 80.5,
"grad_norm": 1.2671548128128052,
"learning_rate": 1e-06,
"loss": 0.3476,
"step": 161
},
{
"epoch": 81.0,
"grad_norm": 1.5032880306243896,
"learning_rate": 1e-06,
"loss": 0.3297,
"step": 162
},
{
"epoch": 81.5,
"grad_norm": 1.432960033416748,
"learning_rate": 1e-06,
"loss": 0.3829,
"step": 163
},
{
"epoch": 82.0,
"grad_norm": 1.785606026649475,
"learning_rate": 1e-06,
"loss": 0.4002,
"step": 164
},
{
"epoch": 82.5,
"grad_norm": 1.599700927734375,
"learning_rate": 1e-06,
"loss": 0.3272,
"step": 165
},
{
"epoch": 83.0,
"grad_norm": 1.3606281280517578,
"learning_rate": 1e-06,
"loss": 0.3462,
"step": 166
},
{
"epoch": 83.5,
"grad_norm": 1.311733603477478,
"learning_rate": 1e-06,
"loss": 0.3459,
"step": 167
},
{
"epoch": 84.0,
"grad_norm": 1.577045202255249,
"learning_rate": 1e-06,
"loss": 0.278,
"step": 168
},
{
"epoch": 84.5,
"grad_norm": 1.5641367435455322,
"learning_rate": 1e-06,
"loss": 0.3636,
"step": 169
},
{
"epoch": 85.0,
"grad_norm": 1.2674757242202759,
"learning_rate": 1e-06,
"loss": 0.3074,
"step": 170
},
{
"epoch": 85.5,
"grad_norm": 1.423398494720459,
"learning_rate": 1e-06,
"loss": 0.32,
"step": 171
},
{
"epoch": 86.0,
"grad_norm": 1.149396538734436,
"learning_rate": 1e-06,
"loss": 0.2577,
"step": 172
},
{
"epoch": 86.5,
"grad_norm": 1.687155842781067,
"learning_rate": 1e-06,
"loss": 0.2998,
"step": 173
},
{
"epoch": 87.0,
"grad_norm": 1.0938485860824585,
"learning_rate": 1e-06,
"loss": 0.2963,
"step": 174
},
{
"epoch": 87.5,
"grad_norm": 1.2464781999588013,
"learning_rate": 1e-06,
"loss": 0.2691,
"step": 175
},
{
"epoch": 88.0,
"grad_norm": 1.259631633758545,
"learning_rate": 1e-06,
"loss": 0.2815,
"step": 176
},
{
"epoch": 88.5,
"grad_norm": 1.2384026050567627,
"learning_rate": 1e-06,
"loss": 0.1886,
"step": 177
},
{
"epoch": 89.0,
"grad_norm": 1.209479808807373,
"learning_rate": 1e-06,
"loss": 0.3283,
"step": 178
},
{
"epoch": 89.5,
"grad_norm": 1.2056385278701782,
"learning_rate": 1e-06,
"loss": 0.2795,
"step": 179
},
{
"epoch": 90.0,
"grad_norm": 1.256142258644104,
"learning_rate": 1e-06,
"loss": 0.3071,
"step": 180
},
{
"epoch": 90.5,
"grad_norm": 1.2020200490951538,
"learning_rate": 1e-06,
"loss": 0.242,
"step": 181
},
{
"epoch": 91.0,
"grad_norm": 1.275436520576477,
"learning_rate": 1e-06,
"loss": 0.2505,
"step": 182
},
{
"epoch": 91.5,
"grad_norm": 1.1096285581588745,
"learning_rate": 1e-06,
"loss": 0.2833,
"step": 183
},
{
"epoch": 92.0,
"grad_norm": 1.0823484659194946,
"learning_rate": 1e-06,
"loss": 0.216,
"step": 184
},
{
"epoch": 92.5,
"grad_norm": 1.112586498260498,
"learning_rate": 1e-06,
"loss": 0.2292,
"step": 185
},
{
"epoch": 93.0,
"grad_norm": 1.004947543144226,
"learning_rate": 1e-06,
"loss": 0.2399,
"step": 186
},
{
"epoch": 93.5,
"grad_norm": 1.10011887550354,
"learning_rate": 1e-06,
"loss": 0.263,
"step": 187
},
{
"epoch": 94.0,
"grad_norm": 0.9535015821456909,
"learning_rate": 1e-06,
"loss": 0.218,
"step": 188
},
{
"epoch": 94.5,
"grad_norm": 1.0121976137161255,
"learning_rate": 1e-06,
"loss": 0.191,
"step": 189
},
{
"epoch": 95.0,
"grad_norm": 0.9026556611061096,
"learning_rate": 1e-06,
"loss": 0.2278,
"step": 190
},
{
"epoch": 95.5,
"grad_norm": 0.9730249643325806,
"learning_rate": 1e-06,
"loss": 0.2422,
"step": 191
},
{
"epoch": 96.0,
"grad_norm": 0.9288642406463623,
"learning_rate": 1e-06,
"loss": 0.2353,
"step": 192
},
{
"epoch": 96.5,
"grad_norm": 0.8509739637374878,
"learning_rate": 1e-06,
"loss": 0.2292,
"step": 193
},
{
"epoch": 97.0,
"grad_norm": 0.9947998523712158,
"learning_rate": 1e-06,
"loss": 0.2309,
"step": 194
},
{
"epoch": 97.5,
"grad_norm": 1.109282374382019,
"learning_rate": 1e-06,
"loss": 0.2369,
"step": 195
},
{
"epoch": 98.0,
"grad_norm": 0.8555991053581238,
"learning_rate": 1e-06,
"loss": 0.2011,
"step": 196
},
{
"epoch": 98.5,
"grad_norm": 0.9674638509750366,
"learning_rate": 1e-06,
"loss": 0.2385,
"step": 197
},
{
"epoch": 99.0,
"grad_norm": 0.781050443649292,
"learning_rate": 1e-06,
"loss": 0.1881,
"step": 198
},
{
"epoch": 99.5,
"grad_norm": 0.8599874377250671,
"learning_rate": 1e-06,
"loss": 0.2031,
"step": 199
},
{
"epoch": 100.0,
"grad_norm": 0.8711087703704834,
"learning_rate": 1e-06,
"loss": 0.2214,
"step": 200
},
{
"epoch": 100.5,
"grad_norm": 0.9213354587554932,
"learning_rate": 1e-06,
"loss": 0.2313,
"step": 201
},
{
"epoch": 101.0,
"grad_norm": 0.871462345123291,
"learning_rate": 1e-06,
"loss": 0.1978,
"step": 202
},
{
"epoch": 101.5,
"grad_norm": 0.7935155630111694,
"learning_rate": 1e-06,
"loss": 0.1873,
"step": 203
},
{
"epoch": 102.0,
"grad_norm": 0.9139618277549744,
"learning_rate": 1e-06,
"loss": 0.2283,
"step": 204
},
{
"epoch": 102.5,
"grad_norm": 0.8635255694389343,
"learning_rate": 1e-06,
"loss": 0.228,
"step": 205
},
{
"epoch": 103.0,
"grad_norm": 0.9213907122612,
"learning_rate": 1e-06,
"loss": 0.1837,
"step": 206
},
{
"epoch": 103.5,
"grad_norm": 0.7787233591079712,
"learning_rate": 1e-06,
"loss": 0.1652,
"step": 207
},
{
"epoch": 104.0,
"grad_norm": 0.8260976076126099,
"learning_rate": 1e-06,
"loss": 0.1986,
"step": 208
},
{
"epoch": 104.5,
"grad_norm": 0.8949348330497742,
"learning_rate": 1e-06,
"loss": 0.172,
"step": 209
},
{
"epoch": 105.0,
"grad_norm": 0.8772971630096436,
"learning_rate": 1e-06,
"loss": 0.201,
"step": 210
},
{
"epoch": 105.5,
"grad_norm": 0.7942510843276978,
"learning_rate": 1e-06,
"loss": 0.1754,
"step": 211
},
{
"epoch": 106.0,
"grad_norm": 0.8099932670593262,
"learning_rate": 1e-06,
"loss": 0.1586,
"step": 212
},
{
"epoch": 106.5,
"grad_norm": 0.880547285079956,
"learning_rate": 1e-06,
"loss": 0.1516,
"step": 213
},
{
"epoch": 107.0,
"grad_norm": 0.8132925033569336,
"learning_rate": 1e-06,
"loss": 0.1657,
"step": 214
},
{
"epoch": 107.5,
"grad_norm": 0.8455451726913452,
"learning_rate": 1e-06,
"loss": 0.1994,
"step": 215
},
{
"epoch": 108.0,
"grad_norm": 0.9202403426170349,
"learning_rate": 1e-06,
"loss": 0.1486,
"step": 216
},
{
"epoch": 108.5,
"grad_norm": 0.8958231806755066,
"learning_rate": 1e-06,
"loss": 0.1949,
"step": 217
},
{
"epoch": 109.0,
"grad_norm": 0.8252700567245483,
"learning_rate": 1e-06,
"loss": 0.1645,
"step": 218
},
{
"epoch": 109.5,
"grad_norm": 0.796977698802948,
"learning_rate": 1e-06,
"loss": 0.1297,
"step": 219
},
{
"epoch": 110.0,
"grad_norm": 0.8288230895996094,
"learning_rate": 1e-06,
"loss": 0.1967,
"step": 220
},
{
"epoch": 110.5,
"grad_norm": 0.9239948987960815,
"learning_rate": 1e-06,
"loss": 0.1546,
"step": 221
},
{
"epoch": 111.0,
"grad_norm": 0.8271680474281311,
"learning_rate": 1e-06,
"loss": 0.1748,
"step": 222
},
{
"epoch": 111.5,
"grad_norm": 0.7675459980964661,
"learning_rate": 1e-06,
"loss": 0.1262,
"step": 223
},
{
"epoch": 112.0,
"grad_norm": 0.7924964427947998,
"learning_rate": 1e-06,
"loss": 0.1434,
"step": 224
},
{
"epoch": 112.5,
"grad_norm": 0.9103841185569763,
"learning_rate": 1e-06,
"loss": 0.186,
"step": 225
},
{
"epoch": 113.0,
"grad_norm": 0.8457869291305542,
"learning_rate": 1e-06,
"loss": 0.1384,
"step": 226
},
{
"epoch": 113.5,
"grad_norm": 0.8478854298591614,
"learning_rate": 1e-06,
"loss": 0.1743,
"step": 227
},
{
"epoch": 114.0,
"grad_norm": 0.8645926713943481,
"learning_rate": 1e-06,
"loss": 0.1514,
"step": 228
},
{
"epoch": 114.5,
"grad_norm": 0.9108607769012451,
"learning_rate": 1e-06,
"loss": 0.1759,
"step": 229
},
{
"epoch": 115.0,
"grad_norm": 0.8300096392631531,
"learning_rate": 1e-06,
"loss": 0.1375,
"step": 230
},
{
"epoch": 115.5,
"grad_norm": 0.9206691384315491,
"learning_rate": 1e-06,
"loss": 0.1369,
"step": 231
},
{
"epoch": 116.0,
"grad_norm": 0.7558128833770752,
"learning_rate": 1e-06,
"loss": 0.1242,
"step": 232
},
{
"epoch": 116.5,
"grad_norm": 0.8597300052642822,
"learning_rate": 1e-06,
"loss": 0.128,
"step": 233
},
{
"epoch": 117.0,
"grad_norm": 0.8134746551513672,
"learning_rate": 1e-06,
"loss": 0.1547,
"step": 234
},
{
"epoch": 117.5,
"grad_norm": 0.9657474160194397,
"learning_rate": 1e-06,
"loss": 0.1534,
"step": 235
},
{
"epoch": 118.0,
"grad_norm": 0.7481112480163574,
"learning_rate": 1e-06,
"loss": 0.1282,
"step": 236
},
{
"epoch": 118.5,
"grad_norm": 0.6953885555267334,
"learning_rate": 1e-06,
"loss": 0.0988,
"step": 237
},
{
"epoch": 119.0,
"grad_norm": 0.8225458860397339,
"learning_rate": 1e-06,
"loss": 0.1135,
"step": 238
},
{
"epoch": 119.5,
"grad_norm": 0.7915026545524597,
"learning_rate": 1e-06,
"loss": 0.1043,
"step": 239
},
{
"epoch": 120.0,
"grad_norm": 0.7963205575942993,
"learning_rate": 1e-06,
"loss": 0.1474,
"step": 240
},
{
"epoch": 120.5,
"grad_norm": 0.796559751033783,
"learning_rate": 1e-06,
"loss": 0.1295,
"step": 241
},
{
"epoch": 121.0,
"grad_norm": 0.8174726963043213,
"learning_rate": 1e-06,
"loss": 0.1305,
"step": 242
},
{
"epoch": 121.5,
"grad_norm": 0.7992000579833984,
"learning_rate": 1e-06,
"loss": 0.1078,
"step": 243
},
{
"epoch": 122.0,
"grad_norm": 0.7742059826850891,
"learning_rate": 1e-06,
"loss": 0.1082,
"step": 244
},
{
"epoch": 122.5,
"grad_norm": 0.7738575339317322,
"learning_rate": 1e-06,
"loss": 0.1075,
"step": 245
},
{
"epoch": 123.0,
"grad_norm": 0.7644574642181396,
"learning_rate": 1e-06,
"loss": 0.1228,
"step": 246
},
{
"epoch": 123.5,
"grad_norm": 0.7060183882713318,
"learning_rate": 1e-06,
"loss": 0.1115,
"step": 247
},
{
"epoch": 124.0,
"grad_norm": 0.7386205196380615,
"learning_rate": 1e-06,
"loss": 0.1181,
"step": 248
},
{
"epoch": 124.5,
"grad_norm": 0.7068957090377808,
"learning_rate": 1e-06,
"loss": 0.1156,
"step": 249
},
{
"epoch": 125.0,
"grad_norm": 0.7993431687355042,
"learning_rate": 1e-06,
"loss": 0.088,
"step": 250
}
],
"logging_steps": 1.0,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 5000,
"save_steps": 50,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.317943074155397e+17,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
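
The log_history entries above each record an epoch, gradient norm, learning rate, and training loss at a given optimizer step. As a minimal sketch of how this state file could be inspected offline, assuming it has been downloaded locally under the hypothetical name "trainer_state.json" (the filename and the sampling stride below are illustrative, not part of the original run):

import json

# Load the exported trainer state and summarize the loss curve.
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
print(f"logged entries: {len(history)}, final epoch: {state['epoch']}, global step: {state['global_step']}")

# Compare the first and last logged training losses to gauge the overfitting run.
first, last = history[0], history[-1]
print(f"loss went from {first['loss']:.4f} (step {first['step']}) to {last['loss']:.4f} (step {last['step']})")

# Print every 25th (step, loss, learning_rate) triple for a quick look at the trend.
for entry in history[::25]:
    print(entry["step"], entry["loss"], entry["learning_rate"])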