{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.496,
  "eval_steps": 156,
  "global_step": 624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 0.0001,
      "loss": 1.031,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0002,
      "loss": 2.1316,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.00019973262032085563,
      "loss": 1.0588,
      "step": 3
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00019946524064171124,
      "loss": 2.037,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00019919786096256686,
      "loss": 1.0911,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 0.00019893048128342245,
      "loss": 1.8383,
      "step": 6
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.00019866310160427807,
      "loss": 1.8296,
      "step": 7
    },
    {
      "epoch": 0.03,
      "learning_rate": 0.0001983957219251337,
      "loss": 1.8112,
      "step": 8
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.0001981283422459893,
      "loss": 1.7675,
      "step": 9
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019786096256684492,
      "loss": 1.7701,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 0.00019759358288770054,
      "loss": 1.7535,
      "step": 11
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019732620320855616,
      "loss": 1.7679,
      "step": 12
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00019705882352941177,
      "loss": 1.7772,
      "step": 13
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0001967914438502674,
      "loss": 1.7286,
      "step": 14
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.000196524064171123,
      "loss": 1.6585,
      "step": 15
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.00019625668449197863,
      "loss": 0.9072,
      "step": 16
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019598930481283424,
      "loss": 1.6208,
      "step": 17
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00019572192513368986,
      "loss": 0.9375,
      "step": 18
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019545454545454548,
      "loss": 1.5977,
      "step": 19
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00019518716577540107,
      "loss": 1.7837,
      "step": 20
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.0001949197860962567,
      "loss": 1.5817,
      "step": 21
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0001946524064171123,
      "loss": 1.7005,
      "step": 22
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00019438502673796792,
      "loss": 0.7865,
      "step": 23
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019411764705882354,
      "loss": 0.7424,
      "step": 24
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019385026737967916,
      "loss": 1.6181,
      "step": 25
    },
    {
      "epoch": 0.1,
      "learning_rate": 0.00019358288770053477,
      "loss": 1.5176,
      "step": 26
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001933155080213904,
      "loss": 0.8716,
      "step": 27
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019304812834224598,
      "loss": 1.6366,
      "step": 28
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0001927807486631016,
      "loss": 0.861,
      "step": 29
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019251336898395722,
      "loss": 1.655,
      "step": 30
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019224598930481283,
      "loss": 0.9451,
      "step": 31
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019197860962566845,
      "loss": 1.6875,
      "step": 32
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00019171122994652407,
      "loss": 1.6885,
      "step": 33
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001914438502673797,
      "loss": 1.5641,
      "step": 34
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.0001911764705882353,
      "loss": 1.7106,
      "step": 35
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019090909090909092,
      "loss": 0.8359,
      "step": 36
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019064171122994654,
      "loss": 1.634,
      "step": 37
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019037433155080216,
      "loss": 1.7435,
      "step": 38
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019010695187165777,
      "loss": 1.5864,
      "step": 39
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.0001898395721925134,
      "loss": 1.5986,
      "step": 40
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.000189572192513369,
      "loss": 0.8104,
      "step": 41
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001893048128342246,
      "loss": 1.5903,
      "step": 42
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.00018903743315508022,
      "loss": 0.706,
      "step": 43
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018877005347593583,
      "loss": 1.6629,
      "step": 44
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018850267379679145,
      "loss": 1.6884,
      "step": 45
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00018823529411764707,
      "loss": 1.622,
      "step": 46
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001879679144385027,
      "loss": 1.6225,
      "step": 47
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.0001877005347593583,
      "loss": 0.8068,
      "step": 48
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001874331550802139,
      "loss": 0.0,
      "step": 49
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001871657754010695,
      "loss": 0.7136,
      "step": 50
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.00018689839572192513,
      "loss": 1.487,
      "step": 51
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018663101604278075,
      "loss": 1.6591,
      "step": 52
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.00018636363636363636,
      "loss": 1.564,
      "step": 53
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.000186096256684492,
      "loss": 1.6848,
      "step": 54
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.0001858288770053476,
      "loss": 1.6442,
      "step": 55
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00018556149732620322,
      "loss": 1.634,
      "step": 56
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018529411764705883,
      "loss": 1.6559,
      "step": 57
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00018502673796791445,
      "loss": 0.8619,
      "step": 58
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00018475935828877007,
      "loss": 1.4555,
      "step": 59
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001844919786096257,
      "loss": 1.6295,
      "step": 60
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0001842245989304813,
      "loss": 0.8006,
      "step": 61
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.00018395721925133692,
      "loss": 1.7844,
      "step": 62
    },
    {
      "epoch": 0.25,
      "learning_rate": 0.0001836898395721925,
      "loss": 1.5599,
      "step": 63
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00018342245989304813,
      "loss": 0.8152,
      "step": 64
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00018315508021390375,
      "loss": 1.5898,
      "step": 65
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00018288770053475936,
      "loss": 0.8534,
      "step": 66
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00018262032085561498,
      "loss": 1.6466,
      "step": 67
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0001823529411764706,
      "loss": 1.7625,
      "step": 68
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00018208556149732622,
      "loss": 0.6436,
      "step": 69
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00018181818181818183,
      "loss": 0.8358,
      "step": 70
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.00018155080213903742,
      "loss": 1.5384,
      "step": 71
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00018128342245989304,
      "loss": 1.5965,
      "step": 72
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00018101604278074866,
      "loss": 1.6551,
      "step": 73
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00018074866310160428,
      "loss": 1.6287,
      "step": 74
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00018048128342245992,
      "loss": 1.6807,
      "step": 75
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00018021390374331554,
      "loss": 0.8354,
      "step": 76
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00017994652406417113,
      "loss": 0.0,
      "step": 77
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00017967914438502675,
      "loss": 1.3238,
      "step": 78
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00017941176470588236,
      "loss": 0.7033,
      "step": 79
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00017914438502673798,
      "loss": 1.6853,
      "step": 80
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.0001788770053475936,
      "loss": 1.558,
      "step": 81
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00017860962566844922,
      "loss": 1.7254,
      "step": 82
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00017834224598930483,
      "loss": 0.8146,
      "step": 83
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00017807486631016042,
      "loss": 1.6413,
      "step": 84
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00017780748663101604,
      "loss": 1.5349,
      "step": 85
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00017754010695187166,
      "loss": 1.5976,
      "step": 86
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00017727272727272728,
      "loss": 0.8735,
      "step": 87
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0001770053475935829,
      "loss": 1.6917,
      "step": 88
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001767379679144385,
      "loss": 1.4532,
      "step": 89
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00017647058823529413,
      "loss": 1.2738,
      "step": 90
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.00017620320855614975,
      "loss": 1.72,
      "step": 91
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00017593582887700534,
      "loss": 1.6528,
      "step": 92
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.00017566844919786095,
      "loss": 0.6675,
      "step": 93
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00017540106951871657,
      "loss": 1.5316,
      "step": 94
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.0001751336898395722,
      "loss": 1.6082,
      "step": 95
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00017486631016042783,
      "loss": 0.8312,
      "step": 96
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00017459893048128345,
      "loss": 1.7084,
      "step": 97
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00017433155080213904,
      "loss": 0.8941,
      "step": 98
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00017406417112299466,
      "loss": 1.6595,
      "step": 99
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00017379679144385028,
      "loss": 0.8231,
      "step": 100
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.0001735294117647059,
      "loss": 1.5094,
      "step": 101
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0001732620320855615,
      "loss": 1.4382,
      "step": 102
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.00017299465240641713,
      "loss": 1.6014,
      "step": 103
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00017272727272727275,
      "loss": 1.6579,
      "step": 104
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00017245989304812836,
      "loss": 1.7174,
      "step": 105
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00017219251336898395,
      "loss": 1.4742,
      "step": 106
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.00017192513368983957,
      "loss": 1.6524,
      "step": 107
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0001716577540106952,
      "loss": 1.475,
      "step": 108
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001713903743315508,
      "loss": 1.6018,
      "step": 109
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00017112299465240642,
      "loss": 1.592,
      "step": 110
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00017085561497326204,
      "loss": 0.7578,
      "step": 111
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00017058823529411766,
      "loss": 1.5841,
      "step": 112
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00017032085561497325,
      "loss": 1.4413,
      "step": 113
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00017005347593582887,
      "loss": 1.4614,
      "step": 114
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00016978609625668448,
      "loss": 1.5979,
      "step": 115
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.0001695187165775401,
      "loss": 0.9329,
      "step": 116
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00016925133689839575,
      "loss": 1.6472,
      "step": 117
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00016898395721925136,
      "loss": 0.8343,
      "step": 118
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00016871657754010698,
      "loss": 1.8057,
      "step": 119
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00016844919786096257,
      "loss": 1.6294,
      "step": 120
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0001681818181818182,
      "loss": 0.7687,
      "step": 121
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.0001679144385026738,
      "loss": 0.852,
      "step": 122
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00016764705882352942,
      "loss": 1.603,
      "step": 123
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00016737967914438504,
      "loss": 0.8358,
      "step": 124
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00016711229946524066,
      "loss": 1.6052,
      "step": 125
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00016684491978609628,
      "loss": 1.7125,
      "step": 126
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00016657754010695187,
      "loss": 0.8034,
      "step": 127
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00016631016042780748,
      "loss": 1.538,
      "step": 128
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.0001660427807486631,
      "loss": 1.5907,
      "step": 129
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00016577540106951872,
      "loss": 1.6852,
      "step": 130
    },
    {
      "epoch": 0.52,
      "learning_rate": 0.00016550802139037434,
      "loss": 0.8292,
      "step": 131
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00016524064171122995,
      "loss": 1.4263,
      "step": 132
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00016497326203208557,
      "loss": 0.816,
      "step": 133
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001647058823529412,
      "loss": 0.8648,
      "step": 134
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00016443850267379678,
      "loss": 1.5239,
      "step": 135
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0001641711229946524,
      "loss": 0.8859,
      "step": 136
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00016390374331550801,
      "loss": 1.5103,
      "step": 137
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00016363636363636366,
      "loss": 1.6071,
      "step": 138
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00016336898395721928,
      "loss": 0.6449,
      "step": 139
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.0001631016042780749,
      "loss": 1.7182,
      "step": 140
    },
    {
      "epoch": 0.56,
      "learning_rate": 0.00016283422459893048,
      "loss": 0.6098,
      "step": 141
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0001625668449197861,
      "loss": 1.4725,
      "step": 142
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00016229946524064172,
      "loss": 1.5916,
      "step": 143
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00016203208556149734,
      "loss": 1.3847,
      "step": 144
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00016176470588235295,
      "loss": 1.556,
      "step": 145
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.00016149732620320857,
      "loss": 1.6262,
      "step": 146
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001612299465240642,
      "loss": 1.5316,
      "step": 147
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.0001609625668449198,
      "loss": 0.0,
      "step": 148
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0001606951871657754,
      "loss": 0.7013,
      "step": 149
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00016042780748663101,
      "loss": 1.734,
      "step": 150
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.00016016042780748663,
      "loss": 0.9174,
      "step": 151
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00015989304812834225,
      "loss": 0.6682,
      "step": 152
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.00015962566844919787,
      "loss": 0.6548,
      "step": 153
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00015935828877005348,
      "loss": 1.6577,
      "step": 154
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001590909090909091,
      "loss": 1.664,
      "step": 155
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001588235294117647,
      "loss": 0.8135,
      "step": 156
    },
    {
      "epoch": 0.62,
      "eval_loss": 1.4762990474700928,
      "eval_runtime": 1875.1125,
      "eval_samples_per_second": 1.067,
      "eval_steps_per_second": 0.067,
      "step": 156
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0001585561497326203,
      "loss": 1.4309,
      "step": 157
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00015828877005347595,
      "loss": 0.8049,
      "step": 158
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00015802139037433157,
      "loss": 1.6503,
      "step": 159
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001577540106951872,
      "loss": 1.4392,
      "step": 160
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001574866310160428,
      "loss": 0.709,
      "step": 161
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001572192513368984,
      "loss": 1.5783,
      "step": 162
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00015695187165775401,
      "loss": 0.65,
      "step": 163
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00015668449197860963,
      "loss": 0.7895,
      "step": 164
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00015641711229946525,
      "loss": 1.7057,
      "step": 165
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00015614973262032087,
      "loss": 0.8384,
      "step": 166
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00015588235294117648,
      "loss": 0.8426,
      "step": 167
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.0001556149732620321,
      "loss": 1.5182,
      "step": 168
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00015534759358288772,
      "loss": 1.7321,
      "step": 169
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.0001550802139037433,
      "loss": 1.6542,
      "step": 170
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00015481283422459893,
      "loss": 1.4952,
      "step": 171
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00015454545454545454,
      "loss": 0.7496,
      "step": 172
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00015427807486631016,
      "loss": 1.4198,
      "step": 173
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00015401069518716578,
      "loss": 1.5103,
      "step": 174
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.0001537433155080214,
      "loss": 0.7112,
      "step": 175
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00015347593582887701,
      "loss": 1.5426,
      "step": 176
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00015320855614973263,
      "loss": 1.6245,
      "step": 177
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00015294117647058822,
      "loss": 1.5267,
      "step": 178
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00015267379679144387,
      "loss": 1.572,
      "step": 179
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00015240641711229948,
      "loss": 1.4671,
      "step": 180
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0001521390374331551,
      "loss": 1.6925,
      "step": 181
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00015187165775401072,
      "loss": 1.5266,
      "step": 182
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00015160427807486634,
      "loss": 1.54,
      "step": 183
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00015133689839572193,
      "loss": 1.4555,
      "step": 184
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00015106951871657754,
      "loss": 1.6631,
      "step": 185
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00015080213903743316,
      "loss": 1.5531,
      "step": 186
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00015053475935828878,
      "loss": 1.4001,
      "step": 187
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0001502673796791444,
      "loss": 0.7514,
      "step": 188
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00015000000000000001,
      "loss": 1.4462,
      "step": 189
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00014973262032085563,
      "loss": 0.6432,
      "step": 190
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00014946524064171122,
      "loss": 1.3762,
      "step": 191
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00014919786096256684,
      "loss": 1.6002,
      "step": 192
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00014893048128342246,
      "loss": 1.541,
      "step": 193
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00014866310160427807,
      "loss": 1.5357,
      "step": 194
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0001483957219251337,
      "loss": 0.7652,
      "step": 195
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0001481283422459893,
      "loss": 1.5464,
      "step": 196
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00014786096256684493,
      "loss": 1.6457,
      "step": 197
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.00014759358288770054,
      "loss": 1.4037,
      "step": 198
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00014732620320855613,
      "loss": 1.4554,
      "step": 199
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00014705882352941178,
      "loss": 1.6477,
      "step": 200
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0001467914438502674,
      "loss": 0.797,
      "step": 201
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00014652406417112301,
      "loss": 1.5195,
      "step": 202
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00014625668449197863,
      "loss": 1.5903,
      "step": 203
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00014598930481283425,
      "loss": 0.699,
      "step": 204
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00014572192513368984,
      "loss": 1.4382,
      "step": 205
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00014545454545454546,
      "loss": 1.6502,
      "step": 206
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.00014518716577540107,
      "loss": 1.6292,
      "step": 207
    },
    {
      "epoch": 0.83,
      "learning_rate": 0.0001449197860962567,
      "loss": 0.7866,
      "step": 208
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0001446524064171123,
      "loss": 1.4458,
      "step": 209
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00014438502673796793,
      "loss": 1.5261,
      "step": 210
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.00014411764705882354,
      "loss": 1.5819,
      "step": 211
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00014385026737967916,
      "loss": 1.6483,
      "step": 212
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00014358288770053475,
      "loss": 0.8272,
      "step": 213
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00014331550802139037,
      "loss": 1.6659,
      "step": 214
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.000143048128342246,
      "loss": 1.5782,
      "step": 215
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0001427807486631016,
      "loss": 1.6261,
      "step": 216
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00014251336898395722,
      "loss": 1.481,
      "step": 217
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00014224598930481284,
      "loss": 1.5098,
      "step": 218
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00014197860962566846,
      "loss": 0.8249,
      "step": 219
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00014171122994652405,
      "loss": 0.8338,
      "step": 220
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.0001414438502673797,
      "loss": 1.6072,
      "step": 221
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0001411764705882353,
      "loss": 1.3685,
      "step": 222
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00014090909090909093,
      "loss": 1.4987,
      "step": 223
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00014064171122994654,
      "loss": 0.7157,
      "step": 224
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00014037433155080216,
      "loss": 1.5327,
      "step": 225
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00014010695187165778,
      "loss": 0.8074,
      "step": 226
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00013983957219251337,
      "loss": 1.5529,
      "step": 227
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.000139572192513369,
      "loss": 1.5925,
      "step": 228
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0001393048128342246,
      "loss": 1.5971,
      "step": 229
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00013903743315508022,
      "loss": 0.0,
      "step": 230
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00013877005347593584,
      "loss": 0.9138,
      "step": 231
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00013850267379679146,
      "loss": 1.541,
      "step": 232
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00013823529411764707,
      "loss": 0.7707,
      "step": 233
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00013796791443850266,
      "loss": 1.5394,
      "step": 234
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.00013770053475935828,
      "loss": 0.8505,
      "step": 235
    },
    {
      "epoch": 0.94,
      "learning_rate": 0.0001374331550802139,
      "loss": 1.7186,
      "step": 236
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00013716577540106952,
      "loss": 1.6292,
      "step": 237
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00013689839572192513,
      "loss": 0.7517,
      "step": 238
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00013663101604278075,
      "loss": 1.4736,
      "step": 239
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00013636363636363637,
      "loss": 0.7838,
      "step": 240
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00013609625668449199,
      "loss": 1.4784,
      "step": 241
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0001358288770053476,
      "loss": 1.4102,
      "step": 242
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00013556149732620322,
      "loss": 1.3916,
      "step": 243
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00013529411764705884,
      "loss": 1.5464,
      "step": 244
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00013502673796791446,
      "loss": 1.4083,
      "step": 245
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00013475935828877007,
      "loss": 0.8571,
      "step": 246
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.0001344919786096257,
      "loss": 1.641,
      "step": 247
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00013422459893048128,
      "loss": 1.6397,
      "step": 248
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0001339572192513369,
      "loss": 1.454,
      "step": 249
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00013368983957219252,
      "loss": 1.7245,
      "step": 250
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00013342245989304813,
      "loss": 1.3854,
      "step": 251
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00013315508021390375,
      "loss": 1.329,
      "step": 252
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00013288770053475937,
      "loss": 0.9579,
      "step": 253
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00013262032085561499,
      "loss": 1.5257,
      "step": 254
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0001323529411764706,
      "loss": 1.4032,
      "step": 255
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.0001320855614973262,
      "loss": 1.3976,
      "step": 256
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0001318181818181818,
      "loss": 1.5309,
      "step": 257
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00013155080213903743,
      "loss": 1.5358,
      "step": 258
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00013128342245989305,
      "loss": 1.3189,
      "step": 259
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00013101604278074866,
      "loss": 0.7508,
      "step": 260
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00013074866310160428,
      "loss": 1.3527,
      "step": 261
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0001304812834224599,
      "loss": 1.4635,
      "step": 262
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00013021390374331552,
      "loss": 1.4307,
      "step": 263
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.00012994652406417113,
      "loss": 1.4932,
      "step": 264
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.00012967914438502675,
      "loss": 1.4041,
      "step": 265
    },
    {
      "epoch": 1.06,
      "learning_rate": 0.00012941176470588237,
      "loss": 1.2893,
      "step": 266
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00012914438502673799,
      "loss": 0.6639,
      "step": 267
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0001288770053475936,
      "loss": 0.6798,
      "step": 268
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0001286096256684492,
      "loss": 1.5976,
      "step": 269
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0001283422459893048,
      "loss": 1.3931,
      "step": 270
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00012807486631016043,
      "loss": 1.5177,
      "step": 271
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00012780748663101605,
      "loss": 1.4111,
      "step": 272
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00012754010695187166,
      "loss": 0.8035,
      "step": 273
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00012727272727272728,
      "loss": 1.3537,
      "step": 274
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0001270053475935829,
      "loss": 1.3563,
      "step": 275
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00012673796791443852,
      "loss": 1.368,
      "step": 276
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0001264705882352941,
      "loss": 1.4295,
      "step": 277
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00012620320855614972,
      "loss": 1.3442,
      "step": 278
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00012593582887700534,
      "loss": 0.7788,
      "step": 279
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00012566844919786096,
      "loss": 1.5099,
      "step": 280
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00012540106951871658,
      "loss": 0.8558,
      "step": 281
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0001251336898395722,
      "loss": 0.8357,
      "step": 282
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0001248663101604278,
      "loss": 1.542,
      "step": 283
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00012459893048128343,
      "loss": 1.5548,
      "step": 284
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00012433155080213905,
      "loss": 1.472,
      "step": 285
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00012406417112299466,
      "loss": 0.0,
      "step": 286
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00012379679144385028,
      "loss": 1.4032,
      "step": 287
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0001235294117647059,
      "loss": 1.4866,
      "step": 288
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00012326203208556152,
      "loss": 1.312,
      "step": 289
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00012299465240641713,
      "loss": 1.5577,
      "step": 290
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00012272727272727272,
      "loss": 1.3697,
      "step": 291
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00012245989304812834,
      "loss": 1.4776,
      "step": 292
    },
    {
      "epoch": 1.17,
      "learning_rate": 0.00012219251336898396,
      "loss": 1.4517,
      "step": 293
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00012192513368983958,
      "loss": 1.4538,
      "step": 294
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0001216577540106952,
      "loss": 1.364,
      "step": 295
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00012139037433155081,
      "loss": 0.6793,
      "step": 296
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00012112299465240642,
      "loss": 1.6474,
      "step": 297
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00012085561497326203,
      "loss": 1.3523,
      "step": 298
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00012058823529411765,
      "loss": 1.3526,
      "step": 299
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00012032085561497325,
      "loss": 1.5111,
      "step": 300
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00012005347593582887,
      "loss": 1.4862,
      "step": 301
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.00011978609625668449,
      "loss": 1.4216,
      "step": 302
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.0001195187165775401,
      "loss": 1.4293,
      "step": 303
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00011925133689839574,
      "loss": 1.5012,
      "step": 304
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00011898395721925135,
      "loss": 1.5517,
      "step": 305
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00011871657754010697,
      "loss": 1.3785,
      "step": 306
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00011844919786096258,
      "loss": 1.5506,
      "step": 307
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0001181818181818182,
      "loss": 0.7255,
      "step": 308
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00011791443850267381,
      "loss": 1.4841,
      "step": 309
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00011764705882352942,
      "loss": 1.5495,
      "step": 310
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00011737967914438503,
      "loss": 1.4021,
      "step": 311
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.00011711229946524065,
      "loss": 1.396,
      "step": 312
    },
    {
      "epoch": 1.25,
      "eval_loss": 1.4490689039230347,
      "eval_runtime": 1817.1431,
      "eval_samples_per_second": 1.101,
      "eval_steps_per_second": 0.069,
      "step": 312
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.00011684491978609627,
      "loss": 0.7482,
      "step": 313
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00011657754010695187,
      "loss": 1.5667,
      "step": 314
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00011631016042780749,
      "loss": 1.5188,
      "step": 315
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.0001160427807486631,
      "loss": 1.4705,
      "step": 316
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00011577540106951872,
      "loss": 1.3675,
      "step": 317
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00011550802139037433,
      "loss": 1.397,
      "step": 318
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00011524064171122995,
      "loss": 0.7067,
      "step": 319
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00011497326203208556,
      "loss": 1.6246,
      "step": 320
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00011470588235294118,
      "loss": 0.5517,
      "step": 321
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.00011443850267379678,
      "loss": 1.3288,
      "step": 322
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.0001141711229946524,
      "loss": 1.5879,
      "step": 323
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00011390374331550802,
      "loss": 0.6801,
      "step": 324
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00011363636363636365,
      "loss": 1.4296,
      "step": 325
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00011336898395721927,
      "loss": 1.3548,
      "step": 326
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.00011310160427807488,
      "loss": 1.3336,
      "step": 327
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.00011283422459893049,
      "loss": 1.4161,
      "step": 328
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0001125668449197861,
      "loss": 1.4569,
      "step": 329
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.00011229946524064172,
      "loss": 1.3923,
      "step": 330
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.00011203208556149734,
      "loss": 0.7172,
      "step": 331
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.00011176470588235294,
      "loss": 0.6755,
      "step": 332
    },
    {
      "epoch": 1.33,
      "learning_rate": 0.00011149732620320856,
      "loss": 1.4229,
      "step": 333
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.00011122994652406418,
      "loss": 1.3606,
      "step": 334
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0001109625668449198,
      "loss": 1.2319,
      "step": 335
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0001106951871657754,
      "loss": 0.773,
      "step": 336
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.00011042780748663102,
      "loss": 1.3415,
      "step": 337
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.00011016042780748664,
      "loss": 1.4365,
      "step": 338
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00010989304812834224,
      "loss": 0.7139,
      "step": 339
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00010962566844919786,
      "loss": 0.0,
      "step": 340
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00010935828877005347,
      "loss": 0.667,
      "step": 341
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.00010909090909090909,
      "loss": 0.777,
      "step": 342
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.0001088235294117647,
      "loss": 1.5484,
      "step": 343
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00010855614973262031,
      "loss": 1.443,
      "step": 344
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00010828877005347596,
      "loss": 1.5211,
      "step": 345
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00010802139037433156,
      "loss": 1.5713,
      "step": 346
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.00010775401069518718,
      "loss": 0.7555,
      "step": 347
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.0001074866310160428,
      "loss": 1.6702,
      "step": 348
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.0001072192513368984,
      "loss": 1.2528,
      "step": 349
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.00010695187165775402,
      "loss": 1.5392,
      "step": 350
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.00010668449197860964,
      "loss": 1.476,
      "step": 351
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.00010641711229946525,
      "loss": 1.3388,
      "step": 352
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.00010614973262032086,
      "loss": 1.5115,
      "step": 353
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.00010588235294117647,
      "loss": 1.4062,
      "step": 354
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.00010561497326203209,
      "loss": 1.4835,
      "step": 355
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.00010534759358288771,
      "loss": 1.328,
      "step": 356
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.00010508021390374331,
      "loss": 1.4516,
      "step": 357
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.00010481283422459893,
      "loss": 0.809,
      "step": 358
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.00010454545454545455,
      "loss": 0.8017,
      "step": 359
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.00010427807486631017,
      "loss": 0.7622,
      "step": 360
    },
    {
      "epoch": 1.44,
      "learning_rate": 0.00010401069518716577,
      "loss": 0.8361,
      "step": 361
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.00010374331550802139,
      "loss": 1.5416,
      "step": 362
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.000103475935828877,
      "loss": 1.4239,
      "step": 363
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00010320855614973262,
      "loss": 1.4299,
      "step": 364
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00010294117647058823,
      "loss": 1.5841,
      "step": 365
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00010267379679144387,
      "loss": 0.6902,
      "step": 366
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.00010240641711229947,
      "loss": 1.3335,
      "step": 367
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.00010213903743315509,
      "loss": 0.7347,
      "step": 368
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.00010187165775401071,
      "loss": 0.7148,
      "step": 369
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.00010160427807486633,
      "loss": 0.7521,
      "step": 370
    },
    {
      "epoch": 1.48,
      "learning_rate": 0.00010133689839572193,
      "loss": 1.4992,
      "step": 371
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.00010106951871657755,
      "loss": 1.5119,
      "step": 372
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.00010080213903743317,
      "loss": 0.0,
      "step": 373
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.00010053475935828878,
      "loss": 1.5924,
      "step": 374
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.00010026737967914439,
      "loss": 1.3433,
      "step": 375
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.0001,
      "loss": 0.7781,
      "step": 376
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.973262032085562e-05,
      "loss": 1.3294,
      "step": 377
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.946524064171123e-05,
      "loss": 1.1677,
      "step": 378
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.919786096256684e-05,
      "loss": 1.4861,
      "step": 379
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.893048128342246e-05,
      "loss": 1.2648,
      "step": 380
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.866310160427808e-05,
      "loss": 0.0,
      "step": 381
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.83957219251337e-05,
      "loss": 1.5553,
      "step": 382
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.812834224598931e-05,
      "loss": 1.5253,
      "step": 383
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.786096256684493e-05,
      "loss": 1.2794,
      "step": 384
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.759358288770053e-05,
      "loss": 0.6316,
      "step": 385
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.732620320855615e-05,
      "loss": 0.0,
      "step": 386
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.705882352941177e-05,
      "loss": 1.5085,
      "step": 387
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.679144385026739e-05,
      "loss": 1.3594,
      "step": 388
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.652406417112299e-05,
      "loss": 1.4425,
      "step": 389
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.625668449197861e-05,
      "loss": 1.3698,
      "step": 390
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.598930481283423e-05,
      "loss": 1.421,
      "step": 391
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.572192513368984e-05,
      "loss": 0.7862,
      "step": 392
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.545454545454546e-05,
      "loss": 1.6053,
      "step": 393
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.518716577540108e-05,
      "loss": 1.4012,
      "step": 394
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.49197860962567e-05,
      "loss": 1.5663,
      "step": 395
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.46524064171123e-05,
      "loss": 1.3553,
      "step": 396
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.438502673796792e-05,
      "loss": 1.4471,
      "step": 397
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.411764705882353e-05,
      "loss": 1.5874,
      "step": 398
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.385026737967915e-05,
      "loss": 0.6658,
      "step": 399
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.358288770053476e-05,
      "loss": 1.3355,
      "step": 400
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.331550802139037e-05,
      "loss": 1.4611,
      "step": 401
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.3048128342246e-05,
      "loss": 1.4876,
      "step": 402
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.278074866310161e-05,
      "loss": 1.4782,
      "step": 403
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.251336898395723e-05,
      "loss": 1.6359,
      "step": 404
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.224598930481284e-05,
      "loss": 1.3812,
      "step": 405
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.197860962566846e-05,
      "loss": 1.4893,
      "step": 406
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.171122994652406e-05,
      "loss": 1.3225,
      "step": 407
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.144385026737968e-05,
      "loss": 1.453,
      "step": 408
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.11764705882353e-05,
      "loss": 1.4077,
      "step": 409
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.090909090909092e-05,
      "loss": 1.5797,
      "step": 410
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.064171122994652e-05,
      "loss": 0.7328,
      "step": 411
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.037433155080214e-05,
      "loss": 0.6501,
      "step": 412
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.010695187165777e-05,
      "loss": 0.7809,
      "step": 413
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.983957219251337e-05,
      "loss": 1.3834,
      "step": 414
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.957219251336899e-05,
      "loss": 1.4274,
      "step": 415
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.930481283422461e-05,
      "loss": 1.5683,
      "step": 416
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.903743315508021e-05,
      "loss": 1.5326,
      "step": 417
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.877005347593583e-05,
      "loss": 0.4335,
      "step": 418
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.850267379679145e-05,
      "loss": 1.3797,
      "step": 419
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.823529411764706e-05,
      "loss": 1.3507,
      "step": 420
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.796791443850267e-05,
      "loss": 1.2687,
      "step": 421
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.770053475935829e-05,
      "loss": 1.4994,
      "step": 422
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.743315508021392e-05,
      "loss": 1.2264,
      "step": 423
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.716577540106952e-05,
      "loss": 1.498,
      "step": 424
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.689839572192514e-05,
      "loss": 1.158,
      "step": 425
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.663101604278076e-05,
      "loss": 0.0,
      "step": 426
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.636363636363637e-05,
      "loss": 0.8047,
      "step": 427
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.609625668449198e-05,
      "loss": 1.3758,
      "step": 428
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.58288770053476e-05,
      "loss": 1.4653,
      "step": 429
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.556149732620321e-05,
      "loss": 1.4074,
      "step": 430
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.529411764705883e-05,
      "loss": 1.5387,
      "step": 431
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.502673796791443e-05,
      "loss": 1.4792,
      "step": 432
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.475935828877005e-05,
      "loss": 0.6999,
      "step": 433
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.449197860962568e-05,
      "loss": 0.7177,
      "step": 434
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.422459893048129e-05,
      "loss": 1.5499,
      "step": 435
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.39572192513369e-05,
      "loss": 1.4556,
      "step": 436
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.368983957219252e-05,
      "loss": 1.5381,
      "step": 437
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.342245989304814e-05,
      "loss": 1.2546,
      "step": 438
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.315508021390374e-05,
      "loss": 1.4273,
      "step": 439
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.288770053475936e-05,
      "loss": 1.7713,
      "step": 440
    },
    {
      "epoch": 1.76,
      "learning_rate": 8.262032085561498e-05,
      "loss": 1.4537,
      "step": 441
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.23529411764706e-05,
      "loss": 1.5588,
      "step": 442
    },
    {
      "epoch": 1.77,
      "learning_rate": 8.20855614973262e-05,
      "loss": 1.4436,
      "step": 443
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.181818181818183e-05,
      "loss": 1.2643,
      "step": 444
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.155080213903745e-05,
      "loss": 1.2508,
      "step": 445
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.128342245989305e-05,
      "loss": 1.3579,
      "step": 446
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.101604278074867e-05,
      "loss": 1.4784,
      "step": 447
    },
    {
      "epoch": 1.79,
      "learning_rate": 8.074866310160429e-05,
      "loss": 1.4132,
      "step": 448
    },
    {
      "epoch": 1.8,
      "learning_rate": 8.04812834224599e-05,
      "loss": 0.7507,
      "step": 449
    },
    {
      "epoch": 1.8,
      "learning_rate": 8.021390374331551e-05,
      "loss": 1.5176,
      "step": 450
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.994652406417112e-05,
      "loss": 1.4306,
      "step": 451
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.967914438502674e-05,
      "loss": 0.5959,
      "step": 452
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.941176470588235e-05,
      "loss": 1.6221,
      "step": 453
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.914438502673798e-05,
      "loss": 0.7004,
      "step": 454
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.88770053475936e-05,
      "loss": 1.5374,
      "step": 455
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.86096256684492e-05,
      "loss": 1.5912,
      "step": 456
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.834224598930482e-05,
      "loss": 0.6562,
      "step": 457
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.807486631016043e-05,
      "loss": 0.6382,
      "step": 458
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.780748663101605e-05,
      "loss": 1.4506,
      "step": 459
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.754010695187165e-05,
      "loss": 1.4908,
      "step": 460
    },
    {
      "epoch": 1.84,
      "learning_rate": 7.727272727272727e-05,
      "loss": 1.5851,
      "step": 461
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.700534759358289e-05,
      "loss": 0.7288,
      "step": 462
    },
    {
      "epoch": 1.85,
      "learning_rate": 7.673796791443851e-05,
      "loss": 1.4052,
      "step": 463
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.647058823529411e-05,
      "loss": 1.4992,
      "step": 464
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.620320855614974e-05,
      "loss": 1.359,
      "step": 465
    },
    {
      "epoch": 1.86,
      "learning_rate": 7.593582887700536e-05,
      "loss": 0.7587,
      "step": 466
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.566844919786096e-05,
      "loss": 1.5194,
      "step": 467
    },
    {
      "epoch": 1.87,
      "learning_rate": 7.540106951871658e-05,
      "loss": 1.4519,
      "step": 468
    },
    {
      "epoch": 1.87,
      "eval_loss": 1.4598816633224487,
      "eval_runtime": 1929.1202,
      "eval_samples_per_second": 1.037,
      "eval_steps_per_second": 0.065,
      "step": 468
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.51336898395722e-05,
      "loss": 0.645,
      "step": 469
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.486631016042782e-05,
      "loss": 0.7295,
      "step": 470
    },
    {
      "epoch": 1.88,
      "learning_rate": 7.459893048128342e-05,
      "loss": 1.3374,
      "step": 471
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.433155080213904e-05,
      "loss": 0.7399,
      "step": 472
    },
    {
      "epoch": 1.89,
      "learning_rate": 7.406417112299465e-05,
      "loss": 1.3815,
      "step": 473
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.379679144385027e-05,
      "loss": 1.3802,
      "step": 474
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.352941176470589e-05,
      "loss": 1.3687,
      "step": 475
    },
    {
      "epoch": 1.9,
      "learning_rate": 7.326203208556151e-05,
      "loss": 0.8355,
      "step": 476
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.299465240641712e-05,
      "loss": 1.469,
      "step": 477
    },
    {
      "epoch": 1.91,
      "learning_rate": 7.272727272727273e-05,
      "loss": 1.4634,
      "step": 478
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.245989304812835e-05,
      "loss": 1.4963,
      "step": 479
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.219251336898396e-05,
      "loss": 1.3808,
      "step": 480
    },
    {
      "epoch": 1.92,
      "learning_rate": 7.192513368983958e-05,
      "loss": 1.5888,
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 7.165775401069518e-05, | |
| "loss": 1.1737, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 7.13903743315508e-05, | |
| "loss": 1.4853, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 7.112299465240642e-05, | |
| "loss": 1.2292, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 7.085561497326202e-05, | |
| "loss": 0.7759, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 7.058823529411765e-05, | |
| "loss": 1.4033, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 7.032085561497327e-05, | |
| "loss": 1.4443, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 7.005347593582889e-05, | |
| "loss": 0.0, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 6.97860962566845e-05, | |
| "loss": 0.7743, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 6.951871657754011e-05, | |
| "loss": 1.4706, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 6.925133689839573e-05, | |
| "loss": 1.4723, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 6.898395721925133e-05, | |
| "loss": 1.3785, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.97, | |
| "learning_rate": 6.871657754010695e-05, | |
| "loss": 1.3848, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 6.844919786096257e-05, | |
| "loss": 1.362, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 6.818181818181818e-05, | |
| "loss": 0.7792, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 6.79144385026738e-05, | |
| "loss": 1.4883, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 6.764705882352942e-05, | |
| "loss": 1.3965, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 6.737967914438504e-05, | |
| "loss": 1.3523, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 6.711229946524064e-05, | |
| "loss": 1.5477, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 6.684491978609626e-05, | |
| "loss": 0.7701, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 6.657754010695188e-05, | |
| "loss": 1.3462, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 6.631016042780749e-05, | |
| "loss": 1.2878, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 6.60427807486631e-05, | |
| "loss": 1.2517, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 6.577540106951871e-05, | |
| "loss": 0.6838, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 6.550802139037433e-05, | |
| "loss": 0.5911, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 6.524064171122995e-05, | |
| "loss": 1.3103, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 6.497326203208557e-05, | |
| "loss": 1.4199, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 6.470588235294118e-05, | |
| "loss": 1.4253, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 6.44385026737968e-05, | |
| "loss": 0.5759, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 6.41711229946524e-05, | |
| "loss": 1.3032, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 6.390374331550802e-05, | |
| "loss": 0.6768, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 6.363636363636364e-05, | |
| "loss": 1.5451, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 6.336898395721926e-05, | |
| "loss": 1.325, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 6.310160427807486e-05, | |
| "loss": 0.5231, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 6.283422459893048e-05, | |
| "loss": 1.2246, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 6.25668449197861e-05, | |
| "loss": 1.2088, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 6.229946524064171e-05, | |
| "loss": 0.6522, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 6.203208556149733e-05, | |
| "loss": 1.2557, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.176470588235295e-05, | |
| "loss": 0.6342, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.149732620320857e-05, | |
| "loss": 1.2905, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 6.122994652406417e-05, | |
| "loss": 0.7026, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.096256684491979e-05, | |
| "loss": 0.0, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 6.0695187165775406e-05, | |
| "loss": 1.3462, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.0427807486631016e-05, | |
| "loss": 0.6842, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 6.016042780748663e-05, | |
| "loss": 1.1985, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.1, | |
| "learning_rate": 5.9893048128342244e-05, | |
| "loss": 1.2946, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 5.962566844919787e-05, | |
| "loss": 0.5533, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 5.9358288770053486e-05, | |
| "loss": 1.3705, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 5.90909090909091e-05, | |
| "loss": 1.3784, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 5.882352941176471e-05, | |
| "loss": 1.4377, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 5.8556149732620325e-05, | |
| "loss": 1.5401, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 5.8288770053475936e-05, | |
| "loss": 0.7267, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 5.802139037433155e-05, | |
| "loss": 1.4631, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 5.7754010695187164e-05, | |
| "loss": 1.2999, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 5.748663101604278e-05, | |
| "loss": 1.4007, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 5.721925133689839e-05, | |
| "loss": 1.2896, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 5.695187165775401e-05, | |
| "loss": 0.6588, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 5.6684491978609634e-05, | |
| "loss": 1.3118, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 5.6417112299465244e-05, | |
| "loss": 1.4214, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 5.614973262032086e-05, | |
| "loss": 0.707, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 5.588235294117647e-05, | |
| "loss": 0.6199, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 5.561497326203209e-05, | |
| "loss": 1.6548, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 5.53475935828877e-05, | |
| "loss": 0.6053, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 5.508021390374332e-05, | |
| "loss": 0.6968, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 5.481283422459893e-05, | |
| "loss": 1.3714, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 5.4545454545454546e-05, | |
| "loss": 1.4276, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 5.427807486631016e-05, | |
| "loss": 1.3744, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 5.401069518716578e-05, | |
| "loss": 1.4243, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.37433155080214e-05, | |
| "loss": 0.6886, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.347593582887701e-05, | |
| "loss": 1.3409, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 5.320855614973263e-05, | |
| "loss": 1.2544, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.294117647058824e-05, | |
| "loss": 1.5045, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 5.2673796791443855e-05, | |
| "loss": 1.3408, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.2406417112299466e-05, | |
| "loss": 0.5976, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.213903743315508e-05, | |
| "loss": 0.6946, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 5.1871657754010694e-05, | |
| "loss": 0.7075, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.160427807486631e-05, | |
| "loss": 1.3134, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 5.1336898395721935e-05, | |
| "loss": 0.6443, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.1069518716577546e-05, | |
| "loss": 1.2005, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.0802139037433164e-05, | |
| "loss": 1.2439, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 5.0534759358288774e-05, | |
| "loss": 1.2592, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5.026737967914439e-05, | |
| "loss": 1.2921, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 5e-05, | |
| "loss": 0.6906, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 4.973262032085561e-05, | |
| "loss": 1.5856, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 4.946524064171123e-05, | |
| "loss": 1.2068, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 4.919786096256685e-05, | |
| "loss": 1.5172, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 4.8930481283422465e-05, | |
| "loss": 1.2724, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.27, | |
| "learning_rate": 4.8663101604278076e-05, | |
| "loss": 1.3549, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 4.8395721925133694e-05, | |
| "loss": 1.2922, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 4.8128342245989304e-05, | |
| "loss": 1.1215, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 4.786096256684492e-05, | |
| "loss": 1.2911, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 4.759358288770054e-05, | |
| "loss": 1.2171, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 4.732620320855615e-05, | |
| "loss": 1.1559, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 4.705882352941177e-05, | |
| "loss": 1.3786, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 4.679144385026738e-05, | |
| "loss": 1.3532, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 4.6524064171123e-05, | |
| "loss": 1.326, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 4.625668449197861e-05, | |
| "loss": 1.3435, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 4.598930481283423e-05, | |
| "loss": 0.7063, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.572192513368984e-05, | |
| "loss": 1.294, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.545454545454546e-05, | |
| "loss": 0.6401, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 4.518716577540107e-05, | |
| "loss": 1.4263, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.491978609625669e-05, | |
| "loss": 1.4469, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.4652406417112304e-05, | |
| "loss": 1.2339, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.4385026737967915e-05, | |
| "loss": 1.4478, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.411764705882353e-05, | |
| "loss": 0.5989, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 4.385026737967914e-05, | |
| "loss": 0.7184, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.358288770053476e-05, | |
| "loss": 1.3133, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.331550802139038e-05, | |
| "loss": 0.6392, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.304812834224599e-05, | |
| "loss": 1.2456, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.2780748663101606e-05, | |
| "loss": 0.5998, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 4.251336898395722e-05, | |
| "loss": 1.2969, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.224598930481284e-05, | |
| "loss": 1.463, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.197860962566845e-05, | |
| "loss": 1.3047, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.171122994652407e-05, | |
| "loss": 1.3019, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.144385026737968e-05, | |
| "loss": 1.4478, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 4.11764705882353e-05, | |
| "loss": 1.1726, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.0909090909090915e-05, | |
| "loss": 0.7182, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.0641711229946525e-05, | |
| "loss": 1.5264, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.037433155080214e-05, | |
| "loss": 1.2031, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 4.0106951871657754e-05, | |
| "loss": 1.251, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 3.983957219251337e-05, | |
| "loss": 1.3863, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 3.957219251336899e-05, | |
| "loss": 1.3897, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 3.93048128342246e-05, | |
| "loss": 1.3974, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 3.903743315508022e-05, | |
| "loss": 1.4126, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 3.877005347593583e-05, | |
| "loss": 1.3108, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 3.8502673796791445e-05, | |
| "loss": 1.4013, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 3.8235294117647055e-05, | |
| "loss": 0.5406, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 3.796791443850268e-05, | |
| "loss": 0.6561, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.770053475935829e-05, | |
| "loss": 1.3784, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.743315508021391e-05, | |
| "loss": 0.6025, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 3.716577540106952e-05, | |
| "loss": 0.6405, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.6898395721925136e-05, | |
| "loss": 0.0, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.6631016042780753e-05, | |
| "loss": 1.2733, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.6363636363636364e-05, | |
| "loss": 1.322, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.609625668449198e-05, | |
| "loss": 0.6051, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 3.582887700534759e-05, | |
| "loss": 1.2809, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.556149732620321e-05, | |
| "loss": 1.2519, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.529411764705883e-05, | |
| "loss": 1.3455, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.5026737967914445e-05, | |
| "loss": 1.3249, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.4759358288770055e-05, | |
| "loss": 1.386, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 3.4491978609625666e-05, | |
| "loss": 0.6617, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.4224598930481284e-05, | |
| "loss": 1.3334, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.39572192513369e-05, | |
| "loss": 1.3032, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 3.368983957219252e-05, | |
| "loss": 1.316, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "eval_loss": 1.4671682119369507, | |
| "eval_runtime": 1873.2618, | |
| "eval_samples_per_second": 1.068, | |
| "eval_steps_per_second": 0.067, | |
| "step": 624 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 750, | |
| "num_train_epochs": 3, | |
| "save_steps": 156, | |
| "total_flos": 5.868557043695616e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |