Training in progress, step 363, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:70cc9bfc54e5704a0456169a978825609ac71b7b1c9c38de547b1a30937e3ae5
 size 17425352
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4f0ffc6285cb67ef2253eac085c595861b394d0f4ef88ea3bf8478c9a9b37965
 size 10252116
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a5aecdf1cddcfbe1387576f944dfa673d525856b11837f99ce53a5e262b12e4b
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3ac5c649b3b07ad0898720745fd06763ece21dd621fd1e465e3bf3dd609d7456
 size 1064
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
+  "epoch": 1.0020703933747412,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 363,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -1918,6 +1918,644 @@
       "learning_rate": 1.4800129365390281e-05,
       "loss": 2.1697,
       "step": 273
+    },
+    {
+      "epoch": 0.756383712905452,
+      "grad_norm": 0.6054173707962036,
+      "learning_rate": 1.4489873172477409e-05,
+      "loss": 1.6706,
+      "step": 274
+    },
+    {
+      "epoch": 0.759144237405107,
+      "grad_norm": 0.7372981905937195,
+      "learning_rate": 1.4182351512311237e-05,
+      "loss": 1.7511,
+      "step": 275
+    },
+    {
+      "epoch": 0.7619047619047619,
+      "grad_norm": 0.6251423954963684,
+      "learning_rate": 1.3877588066250453e-05,
+      "loss": 1.423,
+      "step": 276
+    },
+    {
+      "epoch": 0.7646652864044169,
+      "grad_norm": 0.6341333985328674,
+      "learning_rate": 1.357560630325158e-05,
+      "loss": 1.9509,
+      "step": 277
+    },
+    {
+      "epoch": 0.7674258109040718,
+      "grad_norm": 0.6252625584602356,
+      "learning_rate": 1.3276429478061741e-05,
+      "loss": 1.992,
+      "step": 278
+    },
+    {
+      "epoch": 0.7701863354037267,
+      "grad_norm": 0.6359195709228516,
+      "learning_rate": 1.2980080629427904e-05,
+      "loss": 1.9086,
+      "step": 279
+    },
+    {
+      "epoch": 0.7729468599033816,
+      "grad_norm": 0.8791253566741943,
+      "learning_rate": 1.2686582578322631e-05,
+      "loss": 1.8734,
+      "step": 280
+    },
+    {
+      "epoch": 0.7757073844030365,
+      "grad_norm": 0.6764018535614014,
+      "learning_rate": 1.2395957926186803e-05,
+      "loss": 1.83,
+      "step": 281
+    },
+    {
+      "epoch": 0.7784679089026915,
+      "grad_norm": 0.752800703048706,
+      "learning_rate": 1.2108229053189097e-05,
+      "loss": 1.7646,
+      "step": 282
+    },
+    {
+      "epoch": 0.7812284334023465,
+      "grad_norm": 0.7744107842445374,
+      "learning_rate": 1.1823418116502565e-05,
+      "loss": 1.9604,
+      "step": 283
+    },
+    {
+      "epoch": 0.7839889579020014,
+      "grad_norm": 0.8803069591522217,
+      "learning_rate": 1.1541547048598383e-05,
+      "loss": 2.4493,
+      "step": 284
+    },
+    {
+      "epoch": 0.7867494824016563,
+      "grad_norm": 0.834362268447876,
+      "learning_rate": 1.1262637555556903e-05,
+      "loss": 2.0796,
+      "step": 285
+    },
+    {
+      "epoch": 0.7895100069013112,
+      "grad_norm": 1.0516443252563477,
+      "learning_rate": 1.0986711115396058e-05,
+      "loss": 2.3237,
+      "step": 286
+    },
+    {
+      "epoch": 0.7922705314009661,
+      "grad_norm": 0.7121506929397583,
+      "learning_rate": 1.071378897641752e-05,
+      "loss": 2.1616,
+      "step": 287
+    },
+    {
+      "epoch": 0.7950310559006211,
+      "grad_norm": 0.6001153588294983,
+      "learning_rate": 1.044389215557034e-05,
+      "loss": 1.7798,
+      "step": 288
+    },
+    {
+      "epoch": 0.7977915804002761,
+      "grad_norm": 0.8006875514984131,
+      "learning_rate": 1.0177041436832507e-05,
+      "loss": 2.3374,
+      "step": 289
+    },
+    {
+      "epoch": 0.800552104899931,
+      "grad_norm": 0.6787489652633667,
+      "learning_rate": 9.913257369610473e-06,
+      "loss": 2.1637,
+      "step": 290
+    },
+    {
+      "epoch": 0.8033126293995859,
+      "grad_norm": 0.8265159130096436,
+      "learning_rate": 9.652560267156647e-06,
+      "loss": 2.246,
+      "step": 291
+    },
+    {
+      "epoch": 0.8060731538992408,
+      "grad_norm": 1.0507712364196777,
+      "learning_rate": 9.394970205005177e-06,
+      "loss": 1.8864,
+      "step": 292
+    },
+    {
+      "epoch": 0.8088336783988958,
+      "grad_norm": 0.9701467752456665,
+      "learning_rate": 9.140507019425981e-06,
+      "loss": 2.4656,
+      "step": 293
+    },
+    {
+      "epoch": 0.8115942028985508,
+      "grad_norm": 0.9829404950141907,
+      "learning_rate": 8.88919030589721e-06,
+      "loss": 1.8753,
+      "step": 294
+    },
+    {
+      "epoch": 0.8143547273982057,
+      "grad_norm": 1.346711277961731,
+      "learning_rate": 8.64103941759618e-06,
+      "loss": 1.905,
+      "step": 295
+    },
+    {
+      "epoch": 0.8171152518978606,
+      "grad_norm": 1.1422017812728882,
+      "learning_rate": 8.39607346390921e-06,
+      "loss": 2.1189,
+      "step": 296
+    },
+    {
+      "epoch": 0.8198757763975155,
+      "grad_norm": 1.2060613632202148,
+      "learning_rate": 8.15431130895991e-06,
+      "loss": 1.5586,
+      "step": 297
+    },
+    {
+      "epoch": 0.8226363008971704,
+      "grad_norm": 1.0204120874404907,
+      "learning_rate": 7.915771570156554e-06,
+      "loss": 1.854,
+      "step": 298
+    },
+    {
+      "epoch": 0.8253968253968254,
+      "grad_norm": 1.0156620740890503,
+      "learning_rate": 7.680472616758466e-06,
+      "loss": 1.8096,
+      "step": 299
+    },
+    {
+      "epoch": 0.8281573498964804,
+      "grad_norm": 1.82632577419281,
+      "learning_rate": 7.448432568461344e-06,
+      "loss": 1.8969,
+      "step": 300
+    },
+    {
+      "epoch": 0.8309178743961353,
+      "grad_norm": 0.6277664303779602,
+      "learning_rate": 7.219669294002002e-06,
+      "loss": 2.484,
+      "step": 301
+    },
+    {
+      "epoch": 0.8336783988957902,
+      "grad_norm": 0.6709372401237488,
+      "learning_rate": 6.9942004097823535e-06,
+      "loss": 2.0705,
+      "step": 302
+    },
+    {
+      "epoch": 0.8364389233954451,
+      "grad_norm": 0.6607434153556824,
+      "learning_rate": 6.7720432785127465e-06,
+      "loss": 1.9909,
+      "step": 303
+    },
+    {
+      "epoch": 0.8391994478951,
+      "grad_norm": 0.5632132291793823,
+      "learning_rate": 6.553215007874985e-06,
+      "loss": 1.9455,
+      "step": 304
+    },
+    {
+      "epoch": 0.841959972394755,
+      "grad_norm": 0.541318416595459,
+      "learning_rate": 6.337732449204886e-06,
+      "loss": 2.0353,
+      "step": 305
+    },
+    {
+      "epoch": 0.84472049689441,
+      "grad_norm": 0.7076634168624878,
+      "learning_rate": 6.1256121961945915e-06,
+      "loss": 1.7995,
+      "step": 306
+    },
+    {
+      "epoch": 0.8474810213940649,
+      "grad_norm": 0.6370120644569397,
+      "learning_rate": 5.916870583614792e-06,
+      "loss": 1.9695,
+      "step": 307
+    },
+    {
+      "epoch": 0.8502415458937198,
+      "grad_norm": 0.5746281147003174,
+      "learning_rate": 5.711523686056769e-06,
+      "loss": 2.2275,
+      "step": 308
+    },
+    {
+      "epoch": 0.8530020703933747,
+      "grad_norm": 0.6816925406455994,
+      "learning_rate": 5.509587316694536e-06,
+      "loss": 1.8413,
+      "step": 309
+    },
+    {
+      "epoch": 0.8557625948930296,
+      "grad_norm": 0.7342681288719177,
+      "learning_rate": 5.311077026067196e-06,
+      "loss": 2.1986,
+      "step": 310
+    },
+    {
+      "epoch": 0.8585231193926847,
+      "grad_norm": 0.6418979167938232,
+      "learning_rate": 5.116008100881348e-06,
+      "loss": 1.8917,
+      "step": 311
+    },
+    {
+      "epoch": 0.8612836438923396,
+      "grad_norm": 0.6395264863967896,
+      "learning_rate": 4.924395562833933e-06,
+      "loss": 1.9768,
+      "step": 312
+    },
+    {
+      "epoch": 0.8640441683919945,
+      "grad_norm": 0.6867514252662659,
+      "learning_rate": 4.736254167455473e-06,
+      "loss": 2.2776,
+      "step": 313
+    },
+    {
+      "epoch": 0.8668046928916494,
+      "grad_norm": 0.5997049808502197,
+      "learning_rate": 4.5515984029737615e-06,
+      "loss": 1.8973,
+      "step": 314
+    },
+    {
+      "epoch": 0.8695652173913043,
+      "grad_norm": 0.7374957799911499,
+      "learning_rate": 4.370442489198179e-06,
+      "loss": 1.8923,
+      "step": 315
+    },
+    {
+      "epoch": 0.8723257418909592,
+      "grad_norm": 0.809410035610199,
+      "learning_rate": 4.1928003764246934e-06,
+      "loss": 2.3245,
+      "step": 316
+    },
+    {
+      "epoch": 0.8750862663906143,
+      "grad_norm": 0.5919080972671509,
+      "learning_rate": 4.018685744361539e-06,
+      "loss": 1.9618,
+      "step": 317
+    },
+    {
+      "epoch": 0.8778467908902692,
+      "grad_norm": 0.6320842504501343,
+      "learning_rate": 3.84811200107581e-06,
+      "loss": 1.6811,
+      "step": 318
+    },
+    {
+      "epoch": 0.8806073153899241,
+      "grad_norm": 0.7248905301094055,
+      "learning_rate": 3.6810922819609352e-06,
+      "loss": 2.2784,
+      "step": 319
+    },
+    {
+      "epoch": 0.883367839889579,
+      "grad_norm": 0.5608075261116028,
+      "learning_rate": 3.517639448725163e-06,
+      "loss": 1.6641,
+      "step": 320
+    },
+    {
+      "epoch": 0.8861283643892339,
+      "grad_norm": 0.6745538711547852,
+      "learning_rate": 3.3577660884011485e-06,
+      "loss": 2.1938,
+      "step": 321
+    },
+    {
+      "epoch": 0.8888888888888888,
+      "grad_norm": 0.6236209273338318,
+      "learning_rate": 3.2014845123765734e-06,
+      "loss": 1.7981,
+      "step": 322
+    },
+    {
+      "epoch": 0.8916494133885439,
+      "grad_norm": 0.7572169303894043,
+      "learning_rate": 3.0488067554461818e-06,
+      "loss": 1.9801,
+      "step": 323
+    },
+    {
+      "epoch": 0.8944099378881988,
+      "grad_norm": 0.5115662217140198,
+      "learning_rate": 2.8997445748849716e-06,
+      "loss": 1.6648,
+      "step": 324
+    },
+    {
+      "epoch": 0.8971704623878537,
+      "grad_norm": 0.7543956637382507,
+      "learning_rate": 2.7543094495427913e-06,
+      "loss": 1.8281,
+      "step": 325
+    },
+    {
+      "epoch": 0.8999309868875086,
+      "grad_norm": 0.5938908457756042,
+      "learning_rate": 2.612512578960391e-06,
+      "loss": 1.9528,
+      "step": 326
+    },
+    {
+      "epoch": 0.9026915113871635,
+      "grad_norm": 0.6712254881858826,
+      "learning_rate": 2.474364882507002e-06,
+      "loss": 2.0534,
+      "step": 327
+    },
+    {
+      "epoch": 0.9054520358868184,
+      "grad_norm": 0.5828601121902466,
+      "learning_rate": 2.339876998539442e-06,
+      "loss": 1.7284,
+      "step": 328
+    },
+    {
+      "epoch": 0.9082125603864735,
+      "grad_norm": 0.8522250056266785,
+      "learning_rate": 2.2090592835828814e-06,
+      "loss": 2.1892,
+      "step": 329
+    },
+    {
+      "epoch": 0.9109730848861284,
+      "grad_norm": 0.9318490028381348,
+      "learning_rate": 2.081921811533366e-06,
+      "loss": 2.1246,
+      "step": 330
+    },
+    {
+      "epoch": 0.9137336093857833,
+      "grad_norm": 0.7124171257019043,
+      "learning_rate": 1.9584743728819686e-06,
+      "loss": 1.8016,
+      "step": 331
+    },
+    {
+      "epoch": 0.9164941338854382,
+      "grad_norm": 0.9738188982009888,
+      "learning_rate": 1.83872647396095e-06,
+      "loss": 2.3653,
+      "step": 332
+    },
+    {
+      "epoch": 0.9192546583850931,
+      "grad_norm": 0.7715499401092529,
+      "learning_rate": 1.7226873362116257e-06,
+      "loss": 2.4177,
+      "step": 333
+    },
+    {
+      "epoch": 0.9220151828847482,
+      "grad_norm": 0.7315263748168945,
+      "learning_rate": 1.6103658954742918e-06,
+      "loss": 1.8725,
+      "step": 334
+    },
+    {
+      "epoch": 0.9247757073844031,
+      "grad_norm": 0.7532916069030762,
+      "learning_rate": 1.5017708013000786e-06,
+      "loss": 2.2835,
+      "step": 335
+    },
+    {
+      "epoch": 0.927536231884058,
+      "grad_norm": 0.7594407796859741,
+      "learning_rate": 1.396910416284891e-06,
+      "loss": 2.0195,
+      "step": 336
+    },
+    {
+      "epoch": 0.9302967563837129,
+      "grad_norm": 0.9144908785820007,
+      "learning_rate": 1.2957928154254172e-06,
+      "loss": 2.2633,
+      "step": 337
+    },
+    {
+      "epoch": 0.9330572808833678,
+      "grad_norm": 0.7762795090675354,
+      "learning_rate": 1.1984257854973147e-06,
+      "loss": 2.1501,
+      "step": 338
+    },
+    {
+      "epoch": 0.9358178053830227,
+      "grad_norm": 1.0061681270599365,
+      "learning_rate": 1.1048168244555513e-06,
+      "loss": 2.2689,
+      "step": 339
+    },
+    {
+      "epoch": 0.9385783298826778,
+      "grad_norm": 0.7763106822967529,
+      "learning_rate": 1.0149731408569951e-06,
+      "loss": 1.7844,
+      "step": 340
+    },
+    {
+      "epoch": 0.9413388543823327,
+      "grad_norm": 0.7826094031333923,
+      "learning_rate": 9.289016533053696e-07,
+      "loss": 1.741,
+      "step": 341
+    },
+    {
+      "epoch": 0.9440993788819876,
+      "grad_norm": 1.0212883949279785,
+      "learning_rate": 8.46608989918396e-07,
+      "loss": 1.9246,
+      "step": 342
+    },
+    {
+      "epoch": 0.9468599033816425,
+      "grad_norm": 0.7793722748756409,
+      "learning_rate": 7.681014878174187e-07,
+      "loss": 1.6821,
+      "step": 343
+    },
+    {
+      "epoch": 0.9496204278812974,
+      "grad_norm": 0.7322463989257812,
+      "learning_rate": 6.933851926394175e-07,
+      "loss": 1.7588,
+      "step": 344
+    },
+    {
+      "epoch": 0.9523809523809523,
+      "grad_norm": 0.850485622882843,
+      "learning_rate": 6.224658580713971e-07,
+      "loss": 1.9027,
+      "step": 345
+    },
+    {
+      "epoch": 0.9551414768806074,
+      "grad_norm": 0.9016429781913757,
+      "learning_rate": 5.55348945407369e-07,
+      "loss": 1.9853,
+      "step": 346
+    },
+    {
+      "epoch": 0.9579020013802623,
+      "grad_norm": 0.9636366963386536,
+      "learning_rate": 4.920396231277713e-07,
+      "loss": 1.6812,
+      "step": 347
+    },
+    {
+      "epoch": 0.9606625258799172,
+      "grad_norm": 0.7816176414489746,
+      "learning_rate": 4.3254276650143144e-07,
+      "loss": 1.7355,
+      "step": 348
+    },
+    {
+      "epoch": 0.9634230503795721,
+      "grad_norm": 1.2527109384536743,
+      "learning_rate": 3.7686295721018893e-07,
+      "loss": 2.1681,
+      "step": 349
+    },
+    {
+      "epoch": 0.966183574879227,
+      "grad_norm": 2.706392288208008,
+      "learning_rate": 3.2500448299603305e-07,
+      "loss": 2.2018,
+      "step": 350
+    },
+    {
+      "epoch": 0.968944099378882,
+      "grad_norm": 0.5852137804031372,
+      "learning_rate": 2.769713373309168e-07,
+      "loss": 2.2764,
+      "step": 351
+    },
+    {
+      "epoch": 0.971704623878537,
+      "grad_norm": 0.5416120290756226,
+      "learning_rate": 2.3276721910926448e-07,
+      "loss": 2.1675,
+      "step": 352
+    },
+    {
+      "epoch": 0.9744651483781919,
+      "grad_norm": 0.5589115619659424,
+      "learning_rate": 1.923955323630877e-07,
+      "loss": 2.0924,
+      "step": 353
+    },
+    {
+      "epoch": 0.9772256728778468,
+      "grad_norm": 0.582391619682312,
+      "learning_rate": 1.5585938599989025e-07,
+      "loss": 1.9631,
+      "step": 354
+    },
+    {
+      "epoch": 0.9799861973775017,
+      "grad_norm": 0.6033303141593933,
+      "learning_rate": 1.231615935632313e-07,
+      "loss": 1.5731,
+      "step": 355
+    },
+    {
+      "epoch": 0.9827467218771566,
+      "grad_norm": 0.7292796969413757,
+      "learning_rate": 9.430467301607682e-08,
+      "loss": 2.1532,
+      "step": 356
+    },
+    {
+      "epoch": 0.9855072463768116,
+      "grad_norm": 0.768563449382782,
+      "learning_rate": 6.929084654688222e-08,
+      "loss": 2.1209,
+      "step": 357
+    },
+    {
+      "epoch": 0.9882677708764666,
+      "grad_norm": 0.7994271516799927,
+      "learning_rate": 4.8122040398496105e-08,
+      "loss": 2.1563,
+      "step": 358
+    },
+    {
+      "epoch": 0.9910282953761215,
+      "grad_norm": 0.800473690032959,
+      "learning_rate": 3.0799884719795444e-08,
+      "loss": 2.2567,
+      "step": 359
+    },
+    {
+      "epoch": 0.9937888198757764,
+      "grad_norm": 0.6938081383705139,
+      "learning_rate": 1.7325713440180524e-08,
+      "loss": 1.8412,
+      "step": 360
+    },
+    {
+      "epoch": 0.9965493443754313,
+      "grad_norm": 1.0545095205307007,
+      "learning_rate": 7.700564166834844e-09,
+      "loss": 2.372,
+      "step": 361
+    },
+    {
+      "epoch": 0.9993098688750862,
+      "grad_norm": 0.8724227547645569,
+      "learning_rate": 1.9251781048168493e-09,
+      "loss": 1.7168,
+      "step": 362
+    },
+    {
+      "epoch": 0.9993098688750862,
+      "eval_loss": 2.0619454383850098,
+      "eval_runtime": 5.2483,
+      "eval_samples_per_second": 58.114,
+      "eval_steps_per_second": 14.671,
+      "step": 362
+    },
+    {
+      "epoch": 1.0020703933747412,
+      "grad_norm": 2.523097276687622,
+      "learning_rate": 0.0,
+      "loss": 4.4299,
+      "step": 363
     }
   ],
   "logging_steps": 1,
@@ -1932,12 +2570,12 @@
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
-        "should_training_stop":
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 1.138052632608768e+16,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null