Training in progress, step 1421, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d3554ab3247c21883121fa30c4dc6eae5ab7bb10937df0d3ed2ce060f02266c5
 size 25271744
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5716972665cb63813457499a1f584dbf7cb49132eb74dcec9ca6ce5bfc320325
 size 13685836
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c7576d9ff26e1d563e7a57c0cb0bc8db78315d2e0d62e3ca7c48861f7433f521
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fdd027b2b620ca79cc7f0666273f412bb27bfcd3a8237887af64499e93eca1d2
 size 1064
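The four files above are Git LFS pointer stubs for the binary artifacts written at step 1421 (the PEFT adapter weights, optimizer state, RNG state, and LR-scheduler state); only trainer_state.json below is stored as plain text. As a minimal sketch, not anything this commit specifies: the local path, the standard TrainerState layout, and the use of transformers' Trainer are assumptions, but a checkpoint directory like this is typically inspected and resumed from as follows.

import json

# Hypothetical local copy of this checkpoint directory (assumption).
ckpt_dir = "last-checkpoint"

# trainer_state.json carries the bookkeeping shown in the diff below:
# the current step/epoch plus the per-step training log.
with open(f"{ckpt_dir}/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], state["epoch"])   # 1421, 0.3872...
print(state["log_history"][-1])               # most recent loss / learning-rate entry

# Resuming usually just points a re-created Trainer at the directory, which
# also restores optimizer.pt, scheduler.pt and rng_state.pth:
# trainer.train(resume_from_checkpoint=ckpt_dir)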
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.3872462188309034,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 1421,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7483,6 +7483,2477 @@
   "learning_rate": 1.4566307290975256e-05,
   "loss": 0.7826,
   "step": 1068
 }
 ],
 "logging_steps": 1,
@@ -7497,12 +9968,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos": 1.
 "train_batch_size": 4,
 "trial_name": null,
 "trial_params": null
7486 |
+
},
|
7487 |
+
{
|
7488 |
+
"epoch": 0.29132034337103146,
|
7489 |
+
"grad_norm": 0.23479312658309937,
|
7490 |
+
"learning_rate": 1.4488128058544098e-05,
|
7491 |
+
"loss": 0.793,
|
7492 |
+
"step": 1069
|
7493 |
+
},
|
7494 |
+
{
|
7495 |
+
"epoch": 0.2915928600626788,
|
7496 |
+
"grad_norm": 0.2632211446762085,
|
7497 |
+
"learning_rate": 1.4410123628205135e-05,
|
7498 |
+
"loss": 0.7744,
|
7499 |
+
"step": 1070
|
7500 |
+
},
|
7501 |
+
{
|
7502 |
+
"epoch": 0.2918653767543262,
|
7503 |
+
"grad_norm": 0.2785129249095917,
|
7504 |
+
"learning_rate": 1.4332294383924033e-05,
|
7505 |
+
"loss": 0.661,
|
7506 |
+
"step": 1071
|
7507 |
+
},
|
7508 |
+
{
|
7509 |
+
"epoch": 0.2921378934459736,
|
7510 |
+
"grad_norm": 0.27100294828414917,
|
7511 |
+
"learning_rate": 1.4254640708804052e-05,
|
7512 |
+
"loss": 0.7543,
|
7513 |
+
"step": 1072
|
7514 |
+
},
|
7515 |
+
{
|
7516 |
+
"epoch": 0.2924104101376209,
|
7517 |
+
"grad_norm": 0.28306302428245544,
|
7518 |
+
"learning_rate": 1.4177162985084242e-05,
|
7519 |
+
"loss": 0.83,
|
7520 |
+
"step": 1073
|
7521 |
+
},
|
7522 |
+
{
|
7523 |
+
"epoch": 0.2926829268292683,
|
7524 |
+
"grad_norm": 0.2638351321220398,
|
7525 |
+
"learning_rate": 1.409986159413762e-05,
|
7526 |
+
"loss": 0.7151,
|
7527 |
+
"step": 1074
|
7528 |
+
},
|
7529 |
+
{
|
7530 |
+
"epoch": 0.2929554435209157,
|
7531 |
+
"grad_norm": 0.29239487648010254,
|
7532 |
+
"learning_rate": 1.4022736916469165e-05,
|
7533 |
+
"loss": 0.8336,
|
7534 |
+
"step": 1075
|
7535 |
+
},
|
7536 |
+
{
|
7537 |
+
"epoch": 0.29322796021256303,
|
7538 |
+
"grad_norm": 0.27162572741508484,
|
7539 |
+
"learning_rate": 1.3945789331714016e-05,
|
7540 |
+
"loss": 0.7233,
|
7541 |
+
"step": 1076
|
7542 |
+
},
|
7543 |
+
{
|
7544 |
+
"epoch": 0.2935004769042104,
|
7545 |
+
"grad_norm": 0.2926348149776459,
|
7546 |
+
"learning_rate": 1.3869019218635642e-05,
|
7547 |
+
"loss": 0.7579,
|
7548 |
+
"step": 1077
|
7549 |
+
},
|
7550 |
+
{
|
7551 |
+
"epoch": 0.29377299359585773,
|
7552 |
+
"grad_norm": 0.25599485635757446,
|
7553 |
+
"learning_rate": 1.3792426955123884e-05,
|
7554 |
+
"loss": 0.7765,
|
7555 |
+
"step": 1078
|
7556 |
+
},
|
7557 |
+
{
|
7558 |
+
"epoch": 0.2940455102875051,
|
7559 |
+
"grad_norm": 0.33074283599853516,
|
7560 |
+
"learning_rate": 1.3716012918193205e-05,
|
7561 |
+
"loss": 0.8985,
|
7562 |
+
"step": 1079
|
7563 |
+
},
|
7564 |
+
{
|
7565 |
+
"epoch": 0.2943180269791525,
|
7566 |
+
"grad_norm": 0.3323984444141388,
|
7567 |
+
"learning_rate": 1.363977748398067e-05,
|
7568 |
+
"loss": 0.5904,
|
7569 |
+
"step": 1080
|
7570 |
+
},
|
7571 |
+
{
|
7572 |
+
"epoch": 0.29459054367079984,
|
7573 |
+
"grad_norm": 0.29964008927345276,
|
7574 |
+
"learning_rate": 1.3563721027744308e-05,
|
7575 |
+
"loss": 0.9192,
|
7576 |
+
"step": 1081
|
7577 |
+
},
|
7578 |
+
{
|
7579 |
+
"epoch": 0.2948630603624472,
|
7580 |
+
"grad_norm": 0.32052668929100037,
|
7581 |
+
"learning_rate": 1.3487843923861098e-05,
|
7582 |
+
"loss": 0.7675,
|
7583 |
+
"step": 1082
|
7584 |
+
},
|
7585 |
+
{
|
7586 |
+
"epoch": 0.29513557705409454,
|
7587 |
+
"grad_norm": 0.29530084133148193,
|
7588 |
+
"learning_rate": 1.3412146545825166e-05,
|
7589 |
+
"loss": 0.8817,
|
7590 |
+
"step": 1083
|
7591 |
+
},
|
7592 |
+
{
|
7593 |
+
"epoch": 0.29540809374574195,
|
7594 |
+
"grad_norm": 0.3269053101539612,
|
7595 |
+
"learning_rate": 1.3336629266246031e-05,
|
7596 |
+
"loss": 0.7721,
|
7597 |
+
"step": 1084
|
7598 |
+
},
|
7599 |
+
{
|
7600 |
+
"epoch": 0.2956806104373893,
|
7601 |
+
"grad_norm": 0.3902830481529236,
|
7602 |
+
"learning_rate": 1.3261292456846647e-05,
|
7603 |
+
"loss": 0.8765,
|
7604 |
+
"step": 1085
|
7605 |
+
},
|
7606 |
+
{
|
7607 |
+
"epoch": 0.29595312712903665,
|
7608 |
+
"grad_norm": 0.3931809365749359,
|
7609 |
+
"learning_rate": 1.318613648846162e-05,
|
7610 |
+
"loss": 0.7157,
|
7611 |
+
"step": 1086
|
7612 |
+
},
|
7613 |
+
{
|
7614 |
+
"epoch": 0.296225643820684,
|
7615 |
+
"grad_norm": 0.38379886746406555,
|
7616 |
+
"learning_rate": 1.3111161731035448e-05,
|
7617 |
+
"loss": 0.7333,
|
7618 |
+
"step": 1087
|
7619 |
+
},
|
7620 |
+
{
|
7621 |
+
"epoch": 0.29649816051233135,
|
7622 |
+
"grad_norm": 0.39468738436698914,
|
7623 |
+
"learning_rate": 1.3036368553620604e-05,
|
7624 |
+
"loss": 0.7602,
|
7625 |
+
"step": 1088
|
7626 |
+
},
|
7627 |
+
{
|
7628 |
+
"epoch": 0.29677067720397876,
|
7629 |
+
"grad_norm": 0.5516607165336609,
|
7630 |
+
"learning_rate": 1.2961757324375767e-05,
|
7631 |
+
"loss": 0.8667,
|
7632 |
+
"step": 1089
|
7633 |
+
},
|
7634 |
+
{
|
7635 |
+
"epoch": 0.2970431938956261,
|
7636 |
+
"grad_norm": 0.4426347613334656,
|
7637 |
+
"learning_rate": 1.288732841056398e-05,
|
7638 |
+
"loss": 0.7729,
|
7639 |
+
"step": 1090
|
7640 |
+
},
|
7641 |
+
{
|
7642 |
+
"epoch": 0.29731571058727346,
|
7643 |
+
"grad_norm": 0.6150315999984741,
|
7644 |
+
"learning_rate": 1.2813082178550929e-05,
|
7645 |
+
"loss": 1.0035,
|
7646 |
+
"step": 1091
|
7647 |
+
},
|
7648 |
+
{
|
7649 |
+
"epoch": 0.2975882272789208,
|
7650 |
+
"grad_norm": 0.48583754897117615,
|
7651 |
+
"learning_rate": 1.2739018993803015e-05,
|
7652 |
+
"loss": 0.9011,
|
7653 |
+
"step": 1092
|
7654 |
+
},
|
7655 |
+
{
|
7656 |
+
"epoch": 0.2978607439705682,
|
7657 |
+
"grad_norm": 0.698724627494812,
|
7658 |
+
"learning_rate": 1.2665139220885613e-05,
|
7659 |
+
"loss": 0.9304,
|
7660 |
+
"step": 1093
|
7661 |
+
},
|
7662 |
+
{
|
7663 |
+
"epoch": 0.29813326066221557,
|
7664 |
+
"grad_norm": 0.7356919050216675,
|
7665 |
+
"learning_rate": 1.2591443223461336e-05,
|
7666 |
+
"loss": 0.8827,
|
7667 |
+
"step": 1094
|
7668 |
+
},
|
7669 |
+
{
|
7670 |
+
"epoch": 0.2984057773538629,
|
7671 |
+
"grad_norm": 0.7048261761665344,
|
7672 |
+
"learning_rate": 1.2517931364288133e-05,
|
7673 |
+
"loss": 0.9076,
|
7674 |
+
"step": 1095
|
7675 |
+
},
|
7676 |
+
{
|
7677 |
+
"epoch": 0.2986782940455103,
|
7678 |
+
"grad_norm": 0.589783251285553,
|
7679 |
+
"learning_rate": 1.244460400521757e-05,
|
7680 |
+
"loss": 0.8557,
|
7681 |
+
"step": 1096
|
7682 |
+
},
|
7683 |
+
{
|
7684 |
+
"epoch": 0.2989508107371576,
|
7685 |
+
"grad_norm": 0.8395978212356567,
|
7686 |
+
"learning_rate": 1.2371461507193078e-05,
|
7687 |
+
"loss": 1.1153,
|
7688 |
+
"step": 1097
|
7689 |
+
},
|
7690 |
+
{
|
7691 |
+
"epoch": 0.29922332742880503,
|
7692 |
+
"grad_norm": 0.8709211945533752,
|
7693 |
+
"learning_rate": 1.2298504230248082e-05,
|
7694 |
+
"loss": 0.8491,
|
7695 |
+
"step": 1098
|
7696 |
+
},
|
7697 |
+
{
|
7698 |
+
"epoch": 0.2994958441204524,
|
7699 |
+
"grad_norm": 1.1225576400756836,
|
7700 |
+
"learning_rate": 1.2225732533504308e-05,
|
7701 |
+
"loss": 1.0139,
|
7702 |
+
"step": 1099
|
7703 |
+
},
|
7704 |
+
{
|
7705 |
+
"epoch": 0.29976836081209973,
|
7706 |
+
"grad_norm": 1.8920093774795532,
|
7707 |
+
"learning_rate": 1.215314677516997e-05,
|
7708 |
+
"loss": 0.9761,
|
7709 |
+
"step": 1100
|
7710 |
+
},
|
7711 |
+
{
|
7712 |
+
"epoch": 0.3000408775037471,
|
7713 |
+
"grad_norm": 0.20418307185173035,
|
7714 |
+
"learning_rate": 1.2080747312538083e-05,
|
7715 |
+
"loss": 0.527,
|
7716 |
+
"step": 1101
|
7717 |
+
},
|
7718 |
+
{
|
7719 |
+
"epoch": 0.3003133941953945,
|
7720 |
+
"grad_norm": 0.17562133073806763,
|
7721 |
+
"learning_rate": 1.2008534501984586e-05,
|
7722 |
+
"loss": 0.4431,
|
7723 |
+
"step": 1102
|
7724 |
+
},
|
7725 |
+
{
|
7726 |
+
"epoch": 0.30058591088704184,
|
7727 |
+
"grad_norm": 0.25559869408607483,
|
7728 |
+
"learning_rate": 1.1936508698966664e-05,
|
7729 |
+
"loss": 0.7279,
|
7730 |
+
"step": 1103
|
7731 |
+
},
|
7732 |
+
{
|
7733 |
+
"epoch": 0.3008584275786892,
|
7734 |
+
"grad_norm": 0.17541925609111786,
|
7735 |
+
"learning_rate": 1.1864670258021021e-05,
|
7736 |
+
"loss": 0.6332,
|
7737 |
+
"step": 1104
|
7738 |
+
},
|
7739 |
+
{
|
7740 |
+
"epoch": 0.30113094427033654,
|
7741 |
+
"grad_norm": 0.20371516048908234,
|
7742 |
+
"learning_rate": 1.1793019532762056e-05,
|
7743 |
+
"loss": 0.7436,
|
7744 |
+
"step": 1105
|
7745 |
+
},
|
7746 |
+
{
|
7747 |
+
"epoch": 0.30140346096198395,
|
7748 |
+
"grad_norm": 0.1931721568107605,
|
7749 |
+
"learning_rate": 1.1721556875880168e-05,
|
7750 |
+
"loss": 0.5498,
|
7751 |
+
"step": 1106
|
7752 |
+
},
|
7753 |
+
{
|
7754 |
+
"epoch": 0.3016759776536313,
|
7755 |
+
"grad_norm": 0.17042608559131622,
|
7756 |
+
"learning_rate": 1.1650282639140064e-05,
|
7757 |
+
"loss": 0.455,
|
7758 |
+
"step": 1107
|
7759 |
+
},
|
7760 |
+
{
|
7761 |
+
"epoch": 0.30194849434527865,
|
7762 |
+
"grad_norm": 0.16724911332130432,
|
7763 |
+
"learning_rate": 1.1579197173378892e-05,
|
7764 |
+
"loss": 0.5129,
|
7765 |
+
"step": 1108
|
7766 |
+
},
|
7767 |
+
{
|
7768 |
+
"epoch": 0.302221011036926,
|
7769 |
+
"grad_norm": 0.17278067767620087,
|
7770 |
+
"learning_rate": 1.150830082850468e-05,
|
7771 |
+
"loss": 0.5597,
|
7772 |
+
"step": 1109
|
7773 |
+
},
|
7774 |
+
{
|
7775 |
+
"epoch": 0.30249352772857335,
|
7776 |
+
"grad_norm": 0.20282527804374695,
|
7777 |
+
"learning_rate": 1.1437593953494541e-05,
|
7778 |
+
"loss": 0.6917,
|
7779 |
+
"step": 1110
|
7780 |
+
},
|
7781 |
+
{
|
7782 |
+
"epoch": 0.30276604442022076,
|
7783 |
+
"grad_norm": 0.19484999775886536,
|
7784 |
+
"learning_rate": 1.1367076896392853e-05,
|
7785 |
+
"loss": 0.723,
|
7786 |
+
"step": 1111
|
7787 |
+
},
|
7788 |
+
{
|
7789 |
+
"epoch": 0.3030385611118681,
|
7790 |
+
"grad_norm": 0.233414426445961,
|
7791 |
+
"learning_rate": 1.1296750004309758e-05,
|
7792 |
+
"loss": 0.7689,
|
7793 |
+
"step": 1112
|
7794 |
+
},
|
7795 |
+
{
|
7796 |
+
"epoch": 0.30331107780351546,
|
7797 |
+
"grad_norm": 0.23547577857971191,
|
7798 |
+
"learning_rate": 1.1226613623419269e-05,
|
7799 |
+
"loss": 0.7447,
|
7800 |
+
"step": 1113
|
7801 |
+
},
|
7802 |
+
{
|
7803 |
+
"epoch": 0.3035835944951628,
|
7804 |
+
"grad_norm": 0.2539953291416168,
|
7805 |
+
"learning_rate": 1.1156668098957645e-05,
|
7806 |
+
"loss": 0.7404,
|
7807 |
+
"step": 1114
|
7808 |
+
},
|
7809 |
+
{
|
7810 |
+
"epoch": 0.3038561111868102,
|
7811 |
+
"grad_norm": 0.2312745302915573,
|
7812 |
+
"learning_rate": 1.1086913775221709e-05,
|
7813 |
+
"loss": 0.7263,
|
7814 |
+
"step": 1115
|
7815 |
+
},
|
7816 |
+
{
|
7817 |
+
"epoch": 0.30412862787845757,
|
7818 |
+
"grad_norm": 0.22205998003482819,
|
7819 |
+
"learning_rate": 1.1017350995567099e-05,
|
7820 |
+
"loss": 0.7813,
|
7821 |
+
"step": 1116
|
7822 |
+
},
|
7823 |
+
{
|
7824 |
+
"epoch": 0.3044011445701049,
|
7825 |
+
"grad_norm": 0.2412424385547638,
|
7826 |
+
"learning_rate": 1.0947980102406596e-05,
|
7827 |
+
"loss": 0.7225,
|
7828 |
+
"step": 1117
|
7829 |
+
},
|
7830 |
+
{
|
7831 |
+
"epoch": 0.30467366126175227,
|
7832 |
+
"grad_norm": 0.32700082659721375,
|
7833 |
+
"learning_rate": 1.0878801437208496e-05,
|
7834 |
+
"loss": 0.8742,
|
7835 |
+
"step": 1118
|
7836 |
+
},
|
7837 |
+
{
|
7838 |
+
"epoch": 0.3049461779533996,
|
7839 |
+
"grad_norm": 0.2551735043525696,
|
7840 |
+
"learning_rate": 1.0809815340494823e-05,
|
7841 |
+
"loss": 0.7537,
|
7842 |
+
"step": 1119
|
7843 |
+
},
|
7844 |
+
{
|
7845 |
+
"epoch": 0.30521869464504703,
|
7846 |
+
"grad_norm": 0.23969170451164246,
|
7847 |
+
"learning_rate": 1.074102215183977e-05,
|
7848 |
+
"loss": 0.6806,
|
7849 |
+
"step": 1120
|
7850 |
+
},
|
7851 |
+
{
|
7852 |
+
"epoch": 0.3054912113366944,
|
7853 |
+
"grad_norm": 0.24101929366588593,
|
7854 |
+
"learning_rate": 1.0672422209867878e-05,
|
7855 |
+
"loss": 0.6994,
|
7856 |
+
"step": 1121
|
7857 |
+
},
|
7858 |
+
{
|
7859 |
+
"epoch": 0.30576372802834173,
|
7860 |
+
"grad_norm": 0.3099350333213806,
|
7861 |
+
"learning_rate": 1.0604015852252557e-05,
|
7862 |
+
"loss": 0.8019,
|
7863 |
+
"step": 1122
|
7864 |
+
},
|
7865 |
+
{
|
7866 |
+
"epoch": 0.3060362447199891,
|
7867 |
+
"grad_norm": 0.26219817996025085,
|
7868 |
+
"learning_rate": 1.0535803415714279e-05,
|
7869 |
+
"loss": 0.7896,
|
7870 |
+
"step": 1123
|
7871 |
+
},
|
7872 |
+
{
|
7873 |
+
"epoch": 0.3063087614116365,
|
7874 |
+
"grad_norm": 0.30376192927360535,
|
7875 |
+
"learning_rate": 1.0467785236018945e-05,
|
7876 |
+
"loss": 0.8346,
|
7877 |
+
"step": 1124
|
7878 |
+
},
|
7879 |
+
{
|
7880 |
+
"epoch": 0.30658127810328384,
|
7881 |
+
"grad_norm": 0.2685557007789612,
|
7882 |
+
"learning_rate": 1.0399961647976314e-05,
|
7883 |
+
"loss": 0.77,
|
7884 |
+
"step": 1125
|
7885 |
+
},
|
7886 |
+
{
|
7887 |
+
"epoch": 0.3068537947949312,
|
7888 |
+
"grad_norm": 0.2990666925907135,
|
7889 |
+
"learning_rate": 1.0332332985438248e-05,
|
7890 |
+
"loss": 0.877,
|
7891 |
+
"step": 1126
|
7892 |
+
},
|
7893 |
+
{
|
7894 |
+
"epoch": 0.30712631148657854,
|
7895 |
+
"grad_norm": 0.2962383031845093,
|
7896 |
+
"learning_rate": 1.0264899581297122e-05,
|
7897 |
+
"loss": 0.83,
|
7898 |
+
"step": 1127
|
7899 |
+
},
|
7900 |
+
{
|
7901 |
+
"epoch": 0.3073988281782259,
|
7902 |
+
"grad_norm": 0.2926284074783325,
|
7903 |
+
"learning_rate": 1.0197661767484206e-05,
|
7904 |
+
"loss": 0.7342,
|
7905 |
+
"step": 1128
|
7906 |
+
},
|
7907 |
+
{
|
7908 |
+
"epoch": 0.3076713448698733,
|
7909 |
+
"grad_norm": 0.33166196942329407,
|
7910 |
+
"learning_rate": 1.0130619874967983e-05,
|
7911 |
+
"loss": 0.8257,
|
7912 |
+
"step": 1129
|
7913 |
+
},
|
7914 |
+
{
|
7915 |
+
"epoch": 0.30794386156152065,
|
7916 |
+
"grad_norm": 0.28406834602355957,
|
7917 |
+
"learning_rate": 1.0063774233752542e-05,
|
7918 |
+
"loss": 0.7107,
|
7919 |
+
"step": 1130
|
7920 |
+
},
|
7921 |
+
{
|
7922 |
+
"epoch": 0.308216378253168,
|
7923 |
+
"grad_norm": 0.3521987795829773,
|
7924 |
+
"learning_rate": 9.997125172875943e-06,
|
7925 |
+
"loss": 0.8114,
|
7926 |
+
"step": 1131
|
7927 |
+
},
|
7928 |
+
{
|
7929 |
+
"epoch": 0.30848889494481535,
|
7930 |
+
"grad_norm": 0.3112970292568207,
|
7931 |
+
"learning_rate": 9.930673020408648e-06,
|
7932 |
+
"loss": 0.8324,
|
7933 |
+
"step": 1132
|
7934 |
+
},
|
7935 |
+
{
|
7936 |
+
"epoch": 0.30876141163646276,
|
7937 |
+
"grad_norm": 0.4232897162437439,
|
7938 |
+
"learning_rate": 9.864418103451828e-06,
|
7939 |
+
"loss": 0.9218,
|
7940 |
+
"step": 1133
|
7941 |
+
},
|
7942 |
+
{
|
7943 |
+
"epoch": 0.3090339283281101,
|
7944 |
+
"grad_norm": 0.45642492175102234,
|
7945 |
+
"learning_rate": 9.798360748135782e-06,
|
7946 |
+
"loss": 0.8459,
|
7947 |
+
"step": 1134
|
7948 |
+
},
|
7949 |
+
{
|
7950 |
+
"epoch": 0.30930644501975746,
|
7951 |
+
"grad_norm": 0.43862730264663696,
|
7952 |
+
"learning_rate": 9.73250127961839e-06,
|
7953 |
+
"loss": 0.8717,
|
7954 |
+
"step": 1135
|
7955 |
+
},
|
7956 |
+
{
|
7957 |
+
"epoch": 0.3095789617114048,
|
7958 |
+
"grad_norm": 0.3937472105026245,
|
7959 |
+
"learning_rate": 9.666840022083423e-06,
|
7960 |
+
"loss": 0.8504,
|
7961 |
+
"step": 1136
|
7962 |
+
},
|
7963 |
+
{
|
7964 |
+
"epoch": 0.30985147840305216,
|
7965 |
+
"grad_norm": 0.4388231933116913,
|
7966 |
+
"learning_rate": 9.601377298738977e-06,
|
7967 |
+
"loss": 0.8735,
|
7968 |
+
"step": 1137
|
7969 |
+
},
|
7970 |
+
{
|
7971 |
+
"epoch": 0.31012399509469957,
|
7972 |
+
"grad_norm": 0.4094635844230652,
|
7973 |
+
"learning_rate": 9.53611343181594e-06,
|
7974 |
+
"loss": 0.8977,
|
7975 |
+
"step": 1138
|
7976 |
+
},
|
7977 |
+
{
|
7978 |
+
"epoch": 0.3103965117863469,
|
7979 |
+
"grad_norm": 0.44300606846809387,
|
7980 |
+
"learning_rate": 9.471048742566313e-06,
|
7981 |
+
"loss": 0.7719,
|
7982 |
+
"step": 1139
|
7983 |
+
},
|
7984 |
+
{
|
7985 |
+
"epoch": 0.31066902847799427,
|
7986 |
+
"grad_norm": 0.4214814007282257,
|
7987 |
+
"learning_rate": 9.406183551261683e-06,
|
7988 |
+
"loss": 0.7544,
|
7989 |
+
"step": 1140
|
7990 |
+
},
|
7991 |
+
{
|
7992 |
+
"epoch": 0.3109415451696416,
|
7993 |
+
"grad_norm": 0.5005995631217957,
|
7994 |
+
"learning_rate": 9.34151817719166e-06,
|
7995 |
+
"loss": 0.8141,
|
7996 |
+
"step": 1141
|
7997 |
+
},
|
7998 |
+
{
|
7999 |
+
"epoch": 0.311214061861289,
|
8000 |
+
"grad_norm": 0.4700961410999298,
|
8001 |
+
"learning_rate": 9.277052938662261e-06,
|
8002 |
+
"loss": 0.8017,
|
8003 |
+
"step": 1142
|
8004 |
+
},
|
8005 |
+
{
|
8006 |
+
"epoch": 0.3114865785529364,
|
8007 |
+
"grad_norm": 0.7901832461357117,
|
8008 |
+
"learning_rate": 9.212788152994367e-06,
|
8009 |
+
"loss": 0.9894,
|
8010 |
+
"step": 1143
|
8011 |
+
},
|
8012 |
+
{
|
8013 |
+
"epoch": 0.31175909524458373,
|
8014 |
+
"grad_norm": 0.6408256888389587,
|
8015 |
+
"learning_rate": 9.148724136522146e-06,
|
8016 |
+
"loss": 0.8075,
|
8017 |
+
"step": 1144
|
8018 |
+
},
|
8019 |
+
{
|
8020 |
+
"epoch": 0.3120316119362311,
|
8021 |
+
"grad_norm": 0.6773266196250916,
|
8022 |
+
"learning_rate": 9.084861204591549e-06,
|
8023 |
+
"loss": 0.8681,
|
8024 |
+
"step": 1145
|
8025 |
+
},
|
8026 |
+
{
|
8027 |
+
"epoch": 0.31230412862787843,
|
8028 |
+
"grad_norm": 0.7805168628692627,
|
8029 |
+
"learning_rate": 9.021199671558683e-06,
|
8030 |
+
"loss": 1.012,
|
8031 |
+
"step": 1146
|
8032 |
+
},
|
8033 |
+
{
|
8034 |
+
"epoch": 0.31257664531952584,
|
8035 |
+
"grad_norm": 0.6709644198417664,
|
8036 |
+
"learning_rate": 8.957739850788289e-06,
|
8037 |
+
"loss": 0.8459,
|
8038 |
+
"step": 1147
|
8039 |
+
},
|
8040 |
+
{
|
8041 |
+
"epoch": 0.3128491620111732,
|
8042 |
+
"grad_norm": 0.9978117346763611,
|
8043 |
+
"learning_rate": 8.894482054652248e-06,
|
8044 |
+
"loss": 1.1881,
|
8045 |
+
"step": 1148
|
8046 |
+
},
|
8047 |
+
{
|
8048 |
+
"epoch": 0.31312167870282054,
|
8049 |
+
"grad_norm": 0.9258030652999878,
|
8050 |
+
"learning_rate": 8.831426594527975e-06,
|
8051 |
+
"loss": 1.0228,
|
8052 |
+
"step": 1149
|
8053 |
+
},
|
8054 |
+
{
|
8055 |
+
"epoch": 0.3133941953944679,
|
8056 |
+
"grad_norm": 1.9490892887115479,
|
8057 |
+
"learning_rate": 8.768573780796913e-06,
|
8058 |
+
"loss": 1.071,
|
8059 |
+
"step": 1150
|
8060 |
+
},
|
8061 |
+
{
|
8062 |
+
"epoch": 0.3136667120861153,
|
8063 |
+
"grad_norm": 0.20450201630592346,
|
8064 |
+
"learning_rate": 8.70592392284304e-06,
|
8065 |
+
"loss": 0.5196,
|
8066 |
+
"step": 1151
|
8067 |
+
},
|
8068 |
+
{
|
8069 |
+
"epoch": 0.31393922877776265,
|
8070 |
+
"grad_norm": 0.25564107298851013,
|
8071 |
+
"learning_rate": 8.643477329051248e-06,
|
8072 |
+
"loss": 0.7516,
|
8073 |
+
"step": 1152
|
8074 |
+
},
|
8075 |
+
{
|
8076 |
+
"epoch": 0.31421174546941,
|
8077 |
+
"grad_norm": 0.2340194582939148,
|
8078 |
+
"learning_rate": 8.581234306805969e-06,
|
8079 |
+
"loss": 0.7765,
|
8080 |
+
"step": 1153
|
8081 |
+
},
|
8082 |
+
{
|
8083 |
+
"epoch": 0.31448426216105735,
|
8084 |
+
"grad_norm": 0.19656455516815186,
|
8085 |
+
"learning_rate": 8.51919516248953e-06,
|
8086 |
+
"loss": 0.6065,
|
8087 |
+
"step": 1154
|
8088 |
+
},
|
8089 |
+
{
|
8090 |
+
"epoch": 0.31475677885270475,
|
8091 |
+
"grad_norm": 0.20690998435020447,
|
8092 |
+
"learning_rate": 8.457360201480702e-06,
|
8093 |
+
"loss": 0.7054,
|
8094 |
+
"step": 1155
|
8095 |
+
},
|
8096 |
+
{
|
8097 |
+
"epoch": 0.3150292955443521,
|
8098 |
+
"grad_norm": 0.18005023896694183,
|
8099 |
+
"learning_rate": 8.39572972815323e-06,
|
8100 |
+
"loss": 0.6296,
|
8101 |
+
"step": 1156
|
8102 |
+
},
|
8103 |
+
{
|
8104 |
+
"epoch": 0.31530181223599946,
|
8105 |
+
"grad_norm": 0.1978408396244049,
|
8106 |
+
"learning_rate": 8.334304045874247e-06,
|
8107 |
+
"loss": 0.6379,
|
8108 |
+
"step": 1157
|
8109 |
+
},
|
8110 |
+
{
|
8111 |
+
"epoch": 0.3155743289276468,
|
8112 |
+
"grad_norm": 0.1844942420721054,
|
8113 |
+
"learning_rate": 8.273083457002883e-06,
|
8114 |
+
"loss": 0.6146,
|
8115 |
+
"step": 1158
|
8116 |
+
},
|
8117 |
+
{
|
8118 |
+
"epoch": 0.31584684561929416,
|
8119 |
+
"grad_norm": 0.21633177995681763,
|
8120 |
+
"learning_rate": 8.212068262888684e-06,
|
8121 |
+
"loss": 0.7411,
|
8122 |
+
"step": 1159
|
8123 |
+
},
|
8124 |
+
{
|
8125 |
+
"epoch": 0.31611936231094157,
|
8126 |
+
"grad_norm": 0.2172597050666809,
|
8127 |
+
"learning_rate": 8.151258763870178e-06,
|
8128 |
+
"loss": 0.8363,
|
8129 |
+
"step": 1160
|
8130 |
+
},
|
8131 |
+
{
|
8132 |
+
"epoch": 0.3163918790025889,
|
8133 |
+
"grad_norm": 0.21970897912979126,
|
8134 |
+
"learning_rate": 8.090655259273428e-06,
|
8135 |
+
"loss": 0.7805,
|
8136 |
+
"step": 1161
|
8137 |
+
},
|
8138 |
+
{
|
8139 |
+
"epoch": 0.31666439569423627,
|
8140 |
+
"grad_norm": 0.20643360912799835,
|
8141 |
+
"learning_rate": 8.030258047410438e-06,
|
8142 |
+
"loss": 0.678,
|
8143 |
+
"step": 1162
|
8144 |
+
},
|
8145 |
+
{
|
8146 |
+
"epoch": 0.3169369123858836,
|
8147 |
+
"grad_norm": 0.22377657890319824,
|
8148 |
+
"learning_rate": 7.970067425577848e-06,
|
8149 |
+
"loss": 0.7387,
|
8150 |
+
"step": 1163
|
8151 |
+
},
|
8152 |
+
{
|
8153 |
+
"epoch": 0.317209429077531,
|
8154 |
+
"grad_norm": 0.27889934182167053,
|
8155 |
+
"learning_rate": 7.910083690055347e-06,
|
8156 |
+
"loss": 0.7438,
|
8157 |
+
"step": 1164
|
8158 |
+
},
|
8159 |
+
{
|
8160 |
+
"epoch": 0.3174819457691784,
|
8161 |
+
"grad_norm": 0.23422960937023163,
|
8162 |
+
"learning_rate": 7.850307136104247e-06,
|
8163 |
+
"loss": 0.7388,
|
8164 |
+
"step": 1165
|
8165 |
+
},
|
8166 |
+
{
|
8167 |
+
"epoch": 0.3177544624608257,
|
8168 |
+
"grad_norm": 0.2543887495994568,
|
8169 |
+
"learning_rate": 7.79073805796608e-06,
|
8170 |
+
"loss": 0.7061,
|
8171 |
+
"step": 1166
|
8172 |
+
},
|
8173 |
+
{
|
8174 |
+
"epoch": 0.3180269791524731,
|
8175 |
+
"grad_norm": 0.23068580031394958,
|
8176 |
+
"learning_rate": 7.73137674886107e-06,
|
8177 |
+
"loss": 0.7183,
|
8178 |
+
"step": 1167
|
8179 |
+
},
|
8180 |
+
{
|
8181 |
+
"epoch": 0.31829949584412043,
|
8182 |
+
"grad_norm": 0.21677877008914948,
|
8183 |
+
"learning_rate": 7.67222350098673e-06,
|
8184 |
+
"loss": 0.7613,
|
8185 |
+
"step": 1168
|
8186 |
+
},
|
8187 |
+
{
|
8188 |
+
"epoch": 0.31857201253576783,
|
8189 |
+
"grad_norm": 0.31931090354919434,
|
8190 |
+
"learning_rate": 7.613278605516455e-06,
|
8191 |
+
"loss": 0.8171,
|
8192 |
+
"step": 1169
|
8193 |
+
},
|
8194 |
+
{
|
8195 |
+
"epoch": 0.3188445292274152,
|
8196 |
+
"grad_norm": 0.274218887090683,
|
8197 |
+
"learning_rate": 7.554542352598021e-06,
|
8198 |
+
"loss": 0.8142,
|
8199 |
+
"step": 1170
|
8200 |
+
},
|
8201 |
+
{
|
8202 |
+
"epoch": 0.31911704591906254,
|
8203 |
+
"grad_norm": 0.2706718146800995,
|
8204 |
+
"learning_rate": 7.4960150313521996e-06,
|
8205 |
+
"loss": 0.8394,
|
8206 |
+
"step": 1171
|
8207 |
+
},
|
8208 |
+
{
|
8209 |
+
"epoch": 0.3193895626107099,
|
8210 |
+
"grad_norm": 0.26471391320228577,
|
8211 |
+
"learning_rate": 7.4376969298713124e-06,
|
8212 |
+
"loss": 0.8885,
|
8213 |
+
"step": 1172
|
8214 |
+
},
|
8215 |
+
{
|
8216 |
+
"epoch": 0.3196620793023573,
|
8217 |
+
"grad_norm": 0.2558121383190155,
|
8218 |
+
"learning_rate": 7.379588335217874e-06,
|
8219 |
+
"loss": 0.8169,
|
8220 |
+
"step": 1173
|
8221 |
+
},
|
8222 |
+
{
|
8223 |
+
"epoch": 0.31993459599400464,
|
8224 |
+
"grad_norm": 0.2988712787628174,
|
8225 |
+
"learning_rate": 7.321689533423093e-06,
|
8226 |
+
"loss": 0.7823,
|
8227 |
+
"step": 1174
|
8228 |
+
},
|
8229 |
+
{
|
8230 |
+
"epoch": 0.320207112685652,
|
8231 |
+
"grad_norm": 0.27736392617225647,
|
8232 |
+
"learning_rate": 7.264000809485483e-06,
|
8233 |
+
"loss": 0.6835,
|
8234 |
+
"step": 1175
|
8235 |
+
},
|
8236 |
+
{
|
8237 |
+
"epoch": 0.32047962937729935,
|
8238 |
+
"grad_norm": 0.31084221601486206,
|
8239 |
+
"learning_rate": 7.206522447369546e-06,
|
8240 |
+
"loss": 0.8113,
|
8241 |
+
"step": 1176
|
8242 |
+
},
|
8243 |
+
{
|
8244 |
+
"epoch": 0.3207521460689467,
|
8245 |
+
"grad_norm": 0.30161961913108826,
|
8246 |
+
"learning_rate": 7.1492547300042455e-06,
|
8247 |
+
"loss": 0.8359,
|
8248 |
+
"step": 1177
|
8249 |
+
},
|
8250 |
+
{
|
8251 |
+
"epoch": 0.3210246627605941,
|
8252 |
+
"grad_norm": 0.33541855216026306,
|
8253 |
+
"learning_rate": 7.092197939281698e-06,
|
8254 |
+
"loss": 0.9127,
|
8255 |
+
"step": 1178
|
8256 |
+
},
|
8257 |
+
{
|
8258 |
+
"epoch": 0.32129717945224145,
|
8259 |
+
"grad_norm": 0.3337746262550354,
|
8260 |
+
"learning_rate": 7.0353523560557856e-06,
|
8261 |
+
"loss": 0.7512,
|
8262 |
+
"step": 1179
|
8263 |
+
},
|
8264 |
+
{
|
8265 |
+
"epoch": 0.3215696961438888,
|
8266 |
+
"grad_norm": 0.3435850143432617,
|
8267 |
+
"learning_rate": 6.978718260140715e-06,
|
8268 |
+
"loss": 0.7868,
|
8269 |
+
"step": 1180
|
8270 |
+
},
|
8271 |
+
{
|
8272 |
+
"epoch": 0.32184221283553616,
|
8273 |
+
"grad_norm": 0.3651219606399536,
|
8274 |
+
"learning_rate": 6.922295930309691e-06,
|
8275 |
+
"loss": 0.8309,
|
8276 |
+
"step": 1181
|
8277 |
+
},
|
8278 |
+
{
|
8279 |
+
"epoch": 0.32211472952718356,
|
8280 |
+
"grad_norm": 0.3199502229690552,
|
8281 |
+
"learning_rate": 6.866085644293551e-06,
|
8282 |
+
"loss": 0.7895,
|
8283 |
+
"step": 1182
|
8284 |
+
},
|
8285 |
+
{
|
8286 |
+
"epoch": 0.3223872462188309,
|
8287 |
+
"grad_norm": 0.3950710892677307,
|
8288 |
+
"learning_rate": 6.810087678779353e-06,
|
8289 |
+
"loss": 0.8284,
|
8290 |
+
"step": 1183
|
8291 |
+
},
|
8292 |
+
{
|
8293 |
+
"epoch": 0.32265976291047826,
|
8294 |
+
"grad_norm": 0.4900633990764618,
|
8295 |
+
"learning_rate": 6.754302309409033e-06,
|
8296 |
+
"loss": 0.8346,
|
8297 |
+
"step": 1184
|
8298 |
+
},
|
8299 |
+
{
|
8300 |
+
"epoch": 0.3229322796021256,
|
8301 |
+
"grad_norm": 0.4313778579235077,
|
8302 |
+
"learning_rate": 6.698729810778065e-06,
|
8303 |
+
"loss": 0.8243,
|
8304 |
+
"step": 1185
|
8305 |
+
},
|
8306 |
+
{
|
8307 |
+
"epoch": 0.32320479629377297,
|
8308 |
+
"grad_norm": 0.4453745484352112,
|
8309 |
+
"learning_rate": 6.643370456434117e-06,
|
8310 |
+
"loss": 0.7355,
|
8311 |
+
"step": 1186
|
8312 |
+
},
|
8313 |
+
{
|
8314 |
+
"epoch": 0.3234773129854204,
|
8315 |
+
"grad_norm": 0.5219870209693909,
|
8316 |
+
"learning_rate": 6.588224518875647e-06,
|
8317 |
+
"loss": 0.9848,
|
8318 |
+
"step": 1187
|
8319 |
+
},
|
8320 |
+
{
|
8321 |
+
"epoch": 0.3237498296770677,
|
8322 |
+
"grad_norm": 0.4670015871524811,
|
8323 |
+
"learning_rate": 6.533292269550612e-06,
|
8324 |
+
"loss": 0.8423,
|
8325 |
+
"step": 1188
|
8326 |
+
},
|
8327 |
+
{
|
8328 |
+
"epoch": 0.3240223463687151,
|
8329 |
+
"grad_norm": 0.48384127020835876,
|
8330 |
+
"learning_rate": 6.478573978855146e-06,
|
8331 |
+
"loss": 0.8114,
|
8332 |
+
"step": 1189
|
8333 |
+
},
|
8334 |
+
{
|
8335 |
+
"epoch": 0.3242948630603624,
|
8336 |
+
"grad_norm": 0.4995877146720886,
|
8337 |
+
"learning_rate": 6.424069916132164e-06,
|
8338 |
+
"loss": 0.7772,
|
8339 |
+
"step": 1190
|
8340 |
+
},
|
8341 |
+
{
|
8342 |
+
"epoch": 0.32456737975200983,
|
8343 |
+
"grad_norm": 0.5030633211135864,
|
8344 |
+
"learning_rate": 6.3697803496700845e-06,
|
8345 |
+
"loss": 0.8405,
|
8346 |
+
"step": 1191
|
8347 |
+
},
|
8348 |
+
{
|
8349 |
+
"epoch": 0.3248398964436572,
|
8350 |
+
"grad_norm": 0.5471214652061462,
|
8351 |
+
"learning_rate": 6.315705546701539e-06,
|
8352 |
+
"loss": 0.7965,
|
8353 |
+
"step": 1192
|
8354 |
+
},
|
8355 |
+
{
|
8356 |
+
"epoch": 0.32511241313530453,
|
8357 |
+
"grad_norm": 0.647562563419342,
|
8358 |
+
"learning_rate": 6.2618457734019364e-06,
|
8359 |
+
"loss": 0.8926,
|
8360 |
+
"step": 1193
|
8361 |
+
},
|
8362 |
+
{
|
8363 |
+
"epoch": 0.3253849298269519,
|
8364 |
+
"grad_norm": 0.5754622220993042,
|
8365 |
+
"learning_rate": 6.2082012948883164e-06,
|
8366 |
+
"loss": 0.8234,
|
8367 |
+
"step": 1194
|
8368 |
+
},
|
8369 |
+
{
|
8370 |
+
"epoch": 0.3256574465185993,
|
8371 |
+
"grad_norm": 0.6433972120285034,
|
8372 |
+
"learning_rate": 6.1547723752179045e-06,
|
8373 |
+
"loss": 1.0198,
|
8374 |
+
"step": 1195
|
8375 |
+
},
|
8376 |
+
{
|
8377 |
+
"epoch": 0.32592996321024664,
|
8378 |
+
"grad_norm": 0.7429008483886719,
|
8379 |
+
"learning_rate": 6.101559277386903e-06,
|
8380 |
+
"loss": 1.0615,
|
8381 |
+
"step": 1196
|
8382 |
+
},
|
8383 |
+
{
|
8384 |
+
"epoch": 0.326202479901894,
|
8385 |
+
"grad_norm": 0.6810776591300964,
|
8386 |
+
"learning_rate": 6.048562263329138e-06,
|
8387 |
+
"loss": 0.9668,
|
8388 |
+
"step": 1197
|
8389 |
+
},
|
8390 |
+
{
|
8391 |
+
"epoch": 0.32647499659354134,
|
8392 |
+
"grad_norm": 0.7128338813781738,
|
8393 |
+
"learning_rate": 5.9957815939147955e-06,
|
8394 |
+
"loss": 0.9411,
|
8395 |
+
"step": 1198
|
8396 |
+
},
|
8397 |
+
{
|
8398 |
+
"epoch": 0.3267475132851887,
|
8399 |
+
"grad_norm": 0.9658266305923462,
|
8400 |
+
"learning_rate": 5.943217528949168e-06,
|
8401 |
+
"loss": 1.0195,
|
8402 |
+
"step": 1199
|
8403 |
+
},
|
8404 |
+
{
|
8405 |
+
"epoch": 0.3270200299768361,
|
8406 |
+
"grad_norm": 1.6522877216339111,
|
8407 |
+
"learning_rate": 5.890870327171311e-06,
|
8408 |
+
"loss": 1.0887,
|
8409 |
+
"step": 1200
|
8410 |
+
},
|
8411 |
+
{
|
8412 |
+
"epoch": 0.32729254666848345,
|
8413 |
+
"grad_norm": 0.16899587213993073,
|
8414 |
+
"learning_rate": 5.838740246252794e-06,
|
8415 |
+
"loss": 0.5265,
|
8416 |
+
"step": 1201
|
8417 |
+
},
|
8418 |
+
{
|
8419 |
+
"epoch": 0.3275650633601308,
|
8420 |
+
"grad_norm": 0.27041736245155334,
|
8421 |
+
"learning_rate": 5.786827542796491e-06,
|
8422 |
+
"loss": 0.7685,
|
8423 |
+
"step": 1202
|
8424 |
+
},
|
8425 |
+
{
|
8426 |
+
"epoch": 0.32783758005177815,
|
8427 |
+
"grad_norm": 0.2172725796699524,
|
8428 |
+
"learning_rate": 5.735132472335192e-06,
|
8429 |
+
"loss": 0.682,
|
8430 |
+
"step": 1203
|
8431 |
+
},
|
8432 |
+
{
|
8433 |
+
"epoch": 0.32811009674342556,
|
8434 |
+
"grad_norm": 0.18529295921325684,
|
8435 |
+
"learning_rate": 5.68365528933048e-06,
|
8436 |
+
"loss": 0.7083,
|
8437 |
+
"step": 1204
|
8438 |
+
},
|
8439 |
+
{
|
8440 |
+
"epoch": 0.3283826134350729,
|
8441 |
+
"grad_norm": 0.1669882982969284,
|
8442 |
+
"learning_rate": 5.6323962471714286e-06,
|
8443 |
+
"loss": 0.5485,
|
8444 |
+
"step": 1205
|
8445 |
+
},
|
8446 |
+
{
|
8447 |
+
"epoch": 0.32865513012672026,
|
8448 |
+
"grad_norm": 0.1922728568315506,
|
8449 |
+
"learning_rate": 5.581355598173265e-06,
|
8450 |
+
"loss": 0.6963,
|
8451 |
+
"step": 1206
|
8452 |
+
},
|
8453 |
+
{
|
8454 |
+
"epoch": 0.3289276468183676,
|
8455 |
+
"grad_norm": 0.1576596349477768,
|
8456 |
+
"learning_rate": 5.5305335935762924e-06,
|
8457 |
+
"loss": 0.6041,
|
8458 |
+
"step": 1207
|
8459 |
+
},
|
8460 |
+
{
|
8461 |
+
"epoch": 0.32920016351001496,
|
8462 |
+
"grad_norm": 0.1875656396150589,
|
8463 |
+
"learning_rate": 5.479930483544521e-06,
|
8464 |
+
"loss": 0.6429,
|
8465 |
+
"step": 1208
|
8466 |
+
},
|
8467 |
+
{
|
8468 |
+
"epoch": 0.32947268020166237,
|
8469 |
+
"grad_norm": 0.22325171530246735,
|
8470 |
+
"learning_rate": 5.429546517164486e-06,
|
8471 |
+
"loss": 0.7533,
|
8472 |
+
"step": 1209
|
8473 |
+
},
|
8474 |
+
{
|
8475 |
+
"epoch": 0.3297451968933097,
|
8476 |
+
"grad_norm": 0.20884425938129425,
|
8477 |
+
"learning_rate": 5.379381942444051e-06,
|
8478 |
+
"loss": 0.7362,
|
8479 |
+
"step": 1210
|
8480 |
+
},
|
8481 |
+
{
|
8482 |
+
"epoch": 0.3300177135849571,
|
8483 |
+
"grad_norm": 0.2273874580860138,
|
8484 |
+
"learning_rate": 5.329437006311122e-06,
|
8485 |
+
"loss": 0.6831,
|
8486 |
+
"step": 1211
|
8487 |
+
},
|
8488 |
+
{
|
8489 |
+
"epoch": 0.3302902302766044,
|
8490 |
+
"grad_norm": 0.23726725578308105,
|
8491 |
+
"learning_rate": 5.2797119546124705e-06,
|
8492 |
+
"loss": 0.6726,
|
8493 |
+
"step": 1212
|
8494 |
+
},
|
8495 |
+
{
|
8496 |
+
"epoch": 0.33056274696825183,
|
8497 |
+
"grad_norm": 0.22136268019676208,
|
8498 |
+
"learning_rate": 5.23020703211255e-06,
|
8499 |
+
"loss": 0.697,
|
8500 |
+
"step": 1213
|
8501 |
+
},
|
8502 |
+
{
|
8503 |
+
"epoch": 0.3308352636598992,
|
8504 |
+
"grad_norm": 0.23647244274616241,
|
8505 |
+
"learning_rate": 5.1809224824922175e-06,
|
8506 |
+
"loss": 0.8098,
|
8507 |
+
"step": 1214
|
8508 |
+
},
|
8509 |
+
{
|
8510 |
+
"epoch": 0.33110778035154653,
|
8511 |
+
"grad_norm": 0.22740037739276886,
|
8512 |
+
"learning_rate": 5.131858548347596e-06,
|
8513 |
+
"loss": 0.7298,
|
8514 |
+
"step": 1215
|
8515 |
+
},
|
8516 |
+
{
|
8517 |
+
"epoch": 0.3313802970431939,
|
8518 |
+
"grad_norm": 0.2578193247318268,
|
8519 |
+
"learning_rate": 5.083015471188851e-06,
|
8520 |
+
"loss": 0.6665,
|
8521 |
+
"step": 1216
|
8522 |
+
},
|
8523 |
+
{
|
8524 |
+
"epoch": 0.33165281373484123,
|
8525 |
+
"grad_norm": 0.2765848636627197,
|
8526 |
+
"learning_rate": 5.034393491439043e-06,
|
8527 |
+
"loss": 0.7543,
|
8528 |
+
"step": 1217
|
8529 |
+
},
|
8530 |
+
{
|
8531 |
+
"epoch": 0.33192533042648864,
|
8532 |
+
"grad_norm": 0.28699976205825806,
|
8533 |
+
"learning_rate": 4.985992848432869e-06,
|
8534 |
+
"loss": 0.7952,
|
8535 |
+
"step": 1218
|
8536 |
+
},
|
8537 |
+
{
|
8538 |
+
"epoch": 0.332197847118136,
|
8539 |
+
"grad_norm": 0.23524689674377441,
|
8540 |
+
"learning_rate": 4.93781378041554e-06,
|
8541 |
+
"loss": 0.681,
|
8542 |
+
"step": 1219
|
8543 |
+
},
|
8544 |
+
{
|
8545 |
+
"epoch": 0.33247036380978334,
|
8546 |
+
"grad_norm": 0.3059777319431305,
|
8547 |
+
"learning_rate": 4.889856524541625e-06,
|
8548 |
+
"loss": 0.8537,
|
8549 |
+
"step": 1220
|
8550 |
+
},
|
8551 |
+
{
|
8552 |
+
"epoch": 0.3327428805014307,
|
8553 |
+
"grad_norm": 0.24600951373577118,
|
8554 |
+
"learning_rate": 4.842121316873821e-06,
|
8555 |
+
"loss": 0.7111,
|
8556 |
+
"step": 1221
|
8557 |
+
},
|
8558 |
+
{
|
8559 |
+
"epoch": 0.3330153971930781,
|
8560 |
+
"grad_norm": 0.2652858793735504,
|
8561 |
+
"learning_rate": 4.794608392381828e-06,
|
8562 |
+
"loss": 0.7779,
|
8563 |
+
"step": 1222
|
8564 |
+
},
|
8565 |
+
{
|
8566 |
+
"epoch": 0.33328791388472545,
|
8567 |
+
"grad_norm": 0.2868070900440216,
|
8568 |
+
"learning_rate": 4.747317984941213e-06,
|
8569 |
+
"loss": 0.8745,
|
8570 |
+
"step": 1223
|
8571 |
+
},
|
8572 |
+
{
|
8573 |
+
"epoch": 0.3335604305763728,
|
8574 |
+
"grad_norm": 0.27256304025650024,
|
8575 |
+
"learning_rate": 4.700250327332206e-06,
|
8576 |
+
"loss": 0.7776,
|
8577 |
+
"step": 1224
|
8578 |
+
},
|
8579 |
+
{
|
8580 |
+
"epoch": 0.33383294726802015,
|
8581 |
+
"grad_norm": 0.3164566457271576,
|
8582 |
+
"learning_rate": 4.653405651238607e-06,
|
8583 |
+
"loss": 0.7914,
|
8584 |
+
"step": 1225
|
8585 |
+
},
|
8586 |
+
{
|
8587 |
+
"epoch": 0.3341054639596675,
|
8588 |
+
"grad_norm": 0.27297550439834595,
|
8589 |
+
"learning_rate": 4.606784187246587e-06,
|
8590 |
+
"loss": 0.8135,
|
8591 |
+
"step": 1226
|
8592 |
+
},
|
8593 |
+
{
|
8594 |
+
"epoch": 0.3343779806513149,
|
8595 |
+
"grad_norm": 0.2950969636440277,
|
8596 |
+
"learning_rate": 4.560386164843639e-06,
|
8597 |
+
"loss": 0.9769,
|
8598 |
+
"step": 1227
|
8599 |
+
},
|
8600 |
+
{
|
8601 |
+
"epoch": 0.33465049734296226,
|
8602 |
+
"grad_norm": 0.34373223781585693,
|
8603 |
+
"learning_rate": 4.514211812417352e-06,
|
8604 |
+
"loss": 0.8159,
|
8605 |
+
"step": 1228
|
8606 |
+
},
|
8607 |
+
{
|
8608 |
+
"epoch": 0.3349230140346096,
|
8609 |
+
"grad_norm": 0.3288831114768982,
|
8610 |
+
"learning_rate": 4.468261357254339e-06,
|
8611 |
+
"loss": 0.7217,
|
8612 |
+
"step": 1229
|
8613 |
+
},
|
8614 |
+
{
|
8615 |
+
"epoch": 0.33519553072625696,
|
8616 |
+
"grad_norm": 0.3438679873943329,
|
8617 |
+
"learning_rate": 4.422535025539143e-06,
|
8618 |
+
"loss": 0.8504,
|
8619 |
+
"step": 1230
|
8620 |
+
},
|
8621 |
+
{
|
8622 |
+
"epoch": 0.33546804741790437,
|
8623 |
+
"grad_norm": 0.3123156428337097,
|
8624 |
+
"learning_rate": 4.377033042353063e-06,
|
8625 |
+
"loss": 0.7443,
|
8626 |
+
"step": 1231
|
8627 |
+
},
|
8628 |
+
{
|
8629 |
+
"epoch": 0.3357405641095517,
|
8630 |
+
"grad_norm": 0.3916204869747162,
|
8631 |
+
"learning_rate": 4.331755631673057e-06,
|
8632 |
+
"loss": 0.7213,
|
8633 |
+
"step": 1232
|
8634 |
+
},
|
8635 |
+
{
|
8636 |
+
"epoch": 0.33601308080119907,
|
8637 |
+
"grad_norm": 0.40982723236083984,
|
8638 |
+
"learning_rate": 4.286703016370719e-06,
|
8639 |
+
"loss": 0.9166,
|
8640 |
+
"step": 1233
|
8641 |
+
},
|
8642 |
+
{
|
8643 |
+
"epoch": 0.3362855974928464,
|
8644 |
+
"grad_norm": 0.36077848076820374,
|
8645 |
+
"learning_rate": 4.241875418211039e-06,
|
8646 |
+
"loss": 0.8182,
|
8647 |
+
"step": 1234
|
8648 |
+
},
|
8649 |
+
{
|
8650 |
+
"epoch": 0.3365581141844938,
|
8651 |
+
"grad_norm": 0.4006083309650421,
|
8652 |
+
"learning_rate": 4.197273057851464e-06,
|
8653 |
+
"loss": 0.7847,
|
8654 |
+
"step": 1235
|
8655 |
+
},
|
8656 |
+
{
|
8657 |
+
"epoch": 0.3368306308761412,
|
8658 |
+
"grad_norm": 0.41497910022735596,
|
8659 |
+
"learning_rate": 4.152896154840691e-06,
|
8660 |
+
"loss": 0.8144,
|
8661 |
+
"step": 1236
|
8662 |
+
},
|
8663 |
+
{
|
8664 |
+
"epoch": 0.33710314756778853,
|
8665 |
+
"grad_norm": 0.3812021017074585,
|
8666 |
+
"learning_rate": 4.108744927617669e-06,
|
8667 |
+
"loss": 0.7856,
|
8668 |
+
"step": 1237
|
8669 |
+
},
|
8670 |
+
{
|
8671 |
+
"epoch": 0.3373756642594359,
|
8672 |
+
"grad_norm": 0.4377935528755188,
|
8673 |
+
"learning_rate": 4.064819593510477e-06,
|
8674 |
+
"loss": 0.8722,
|
8675 |
+
"step": 1238
|
8676 |
+
},
|
8677 |
+
{
|
8678 |
+
"epoch": 0.33764818095108323,
|
8679 |
+
"grad_norm": 0.55827397108078,
|
8680 |
+
"learning_rate": 4.021120368735254e-06,
|
8681 |
+
"loss": 0.7971,
|
8682 |
+
"step": 1239
|
8683 |
+
},
|
8684 |
+
{
|
8685 |
+
"epoch": 0.33792069764273064,
|
8686 |
+
"grad_norm": 0.48777827620506287,
|
8687 |
+
"learning_rate": 3.9776474683951795e-06,
|
8688 |
+
"loss": 0.7748,
|
8689 |
+
"step": 1240
|
8690 |
+
},
|
8691 |
+
{
|
8692 |
+
"epoch": 0.338193214334378,
|
8693 |
+
"grad_norm": 0.4867183566093445,
|
8694 |
+
"learning_rate": 3.9344011064793516e-06,
|
8695 |
+
"loss": 0.7869,
|
8696 |
+
"step": 1241
|
8697 |
+
},
|
8698 |
+
{
|
8699 |
+
"epoch": 0.33846573102602534,
|
8700 |
+
"grad_norm": 0.483394980430603,
|
8701 |
+
"learning_rate": 3.891381495861779e-06,
|
8702 |
+
"loss": 0.8474,
|
8703 |
+
"step": 1242
|
8704 |
+
},
|
8705 |
+
{
|
8706 |
+
"epoch": 0.3387382477176727,
|
8707 |
+
"grad_norm": 0.6140143275260925,
|
8708 |
+
"learning_rate": 3.8485888483003385e-06,
|
8709 |
+
"loss": 0.9177,
|
8710 |
+
"step": 1243
|
8711 |
+
},
|
8712 |
+
{
|
8713 |
+
"epoch": 0.3390107644093201,
|
8714 |
+
"grad_norm": 0.6502029299736023,
|
8715 |
+
"learning_rate": 3.8060233744356633e-06,
|
8716 |
+
"loss": 0.7796,
|
8717 |
+
"step": 1244
|
8718 |
+
},
|
8719 |
+
{
|
8720 |
+
"epoch": 0.33928328110096745,
|
8721 |
+
"grad_norm": 0.768630862236023,
|
8722 |
+
"learning_rate": 3.763685283790208e-06,
|
8723 |
+
"loss": 1.0335,
|
8724 |
+
"step": 1245
|
8725 |
+
},
|
8726 |
+
{
|
8727 |
+
"epoch": 0.3395557977926148,
|
8728 |
+
"grad_norm": 0.8868127465248108,
|
8729 |
+
"learning_rate": 3.721574784767162e-06,
|
8730 |
+
"loss": 0.9071,
|
8731 |
+
"step": 1246
|
8732 |
+
},
|
8733 |
+
{
|
8734 |
+
"epoch": 0.33982831448426215,
|
8735 |
+
"grad_norm": 0.7759135365486145,
|
8736 |
+
"learning_rate": 3.679692084649372e-06,
|
8737 |
+
"loss": 0.8559,
|
8738 |
+
"step": 1247
|
8739 |
+
},
|
8740 |
+
{
|
8741 |
+
"epoch": 0.3401008311759095,
|
8742 |
+
"grad_norm": 0.8646159768104553,
|
8743 |
+
"learning_rate": 3.638037389598453e-06,
|
8744 |
+
"loss": 0.9137,
|
8745 |
+
"step": 1248
|
8746 |
+
},
|
8747 |
+
{
|
8748 |
+
"epoch": 0.3403733478675569,
|
8749 |
+
"grad_norm": 1.10651433467865,
|
8750 |
+
"learning_rate": 3.596610904653652e-06,
|
8751 |
+
"loss": 1.0428,
|
8752 |
+
"step": 1249
|
8753 |
+
},
|
8754 |
+
{
|
8755 |
+
"epoch": 0.34064586455920426,
|
8756 |
+
"grad_norm": 2.0572776794433594,
|
8757 |
+
"learning_rate": 3.555412833730881e-06,
|
8758 |
+
"loss": 1.1088,
|
8759 |
+
"step": 1250
|
8760 |
+
},
|
8761 |
+
{
|
8762 |
+
"epoch": 0.3409183812508516,
|
8763 |
+
"grad_norm": 0.19651058316230774,
|
8764 |
+
"learning_rate": 3.5144433796217515e-06,
|
8765 |
+
"loss": 0.4734,
|
8766 |
+
"step": 1251
|
8767 |
+
},
|
8768 |
+
{
|
8769 |
+
"epoch": 0.34119089794249896,
|
8770 |
+
"grad_norm": 0.23292307555675507,
|
8771 |
+
"learning_rate": 3.4737027439925072e-06,
|
8772 |
+
"loss": 0.7076,
|
8773 |
+
"step": 1252
|
8774 |
+
},
|
8775 |
+
{
|
8776 |
+
"epoch": 0.34146341463414637,
|
8777 |
+
"grad_norm": 0.21416880190372467,
|
8778 |
+
"learning_rate": 3.4331911273830784e-06,
|
8779 |
+
"loss": 0.7243,
|
8780 |
+
"step": 1253
|
8781 |
+
},
|
8782 |
+
{
|
8783 |
+
"epoch": 0.3417359313257937,
|
8784 |
+
"grad_norm": 0.18489998579025269,
|
8785 |
+
"learning_rate": 3.3929087292060903e-06,
|
8786 |
+
"loss": 0.6635,
|
8787 |
+
"step": 1254
|
8788 |
+
},
|
8789 |
+
{
|
8790 |
+
"epoch": 0.34200844801744107,
|
8791 |
+
"grad_norm": 0.18693317472934723,
|
8792 |
+
"learning_rate": 3.352855747745859e-06,
|
8793 |
+
"loss": 0.6376,
|
8794 |
+
"step": 1255
|
8795 |
+
},
|
8796 |
+
{
|
8797 |
+
"epoch": 0.3422809647090884,
|
8798 |
+
"grad_norm": 0.18244442343711853,
|
8799 |
+
"learning_rate": 3.313032380157455e-06,
|
8800 |
+
"loss": 0.5983,
|
8801 |
+
"step": 1256
|
8802 |
+
},
|
8803 |
+
{
|
8804 |
+
"epoch": 0.34255348140073577,
|
8805 |
+
"grad_norm": 0.18967251479625702,
|
8806 |
+
"learning_rate": 3.2734388224656575e-06,
|
8807 |
+
"loss": 0.6117,
|
8808 |
+
"step": 1257
|
8809 |
+
},
|
8810 |
+
{
|
8811 |
+
"epoch": 0.3428259980923832,
|
8812 |
+
"grad_norm": 0.18715858459472656,
|
8813 |
+
"learning_rate": 3.2340752695640965e-06,
|
8814 |
+
"loss": 0.5481,
|
8815 |
+
"step": 1258
|
8816 |
+
},
|
8817 |
+
{
|
8818 |
+
"epoch": 0.34309851478403053,
|
8819 |
+
"grad_norm": 0.20008553564548492,
|
8820 |
+
"learning_rate": 3.1949419152142e-06,
|
8821 |
+
"loss": 0.7321,
|
8822 |
+
"step": 1259
|
8823 |
+
},
|
8824 |
+
{
|
8825 |
+
"epoch": 0.3433710314756779,
|
8826 |
+
"grad_norm": 0.20943793654441833,
|
8827 |
+
"learning_rate": 3.1560389520442822e-06,
|
8828 |
+
"loss": 0.7499,
|
8829 |
+
"step": 1260
|
8830 |
+
},
|
8831 |
+
{
|
8832 |
+
"epoch": 0.34364354816732523,
|
8833 |
+
"grad_norm": 0.213802307844162,
|
8834 |
+
"learning_rate": 3.117366571548608e-06,
|
8835 |
+
"loss": 0.7314,
|
8836 |
+
"step": 1261
|
8837 |
+
},
|
8838 |
+
{
|
8839 |
+
"epoch": 0.34391606485897264,
|
8840 |
+
"grad_norm": 0.21895675361156464,
|
8841 |
+
"learning_rate": 3.0789249640864158e-06,
|
8842 |
+
"loss": 0.716,
|
8843 |
+
"step": 1262
|
8844 |
+
},
|
8845 |
+
{
|
8846 |
+
"epoch": 0.34418858155062,
|
8847 |
+
"grad_norm": 0.2621181607246399,
|
8848 |
+
"learning_rate": 3.0407143188809884e-06,
|
8849 |
+
"loss": 0.8859,
|
8850 |
+
"step": 1263
|
8851 |
+
},
|
8852 |
+
{
|
8853 |
+
"epoch": 0.34446109824226734,
|
8854 |
+
"grad_norm": 0.25185903906822205,
|
8855 |
+
"learning_rate": 3.0027348240187658e-06,
|
8856 |
+
"loss": 0.7664,
|
8857 |
+
"step": 1264
|
8858 |
+
},
|
8859 |
+
{
|
8860 |
+
"epoch": 0.3447336149339147,
|
8861 |
+
"grad_norm": 0.22787964344024658,
|
8862 |
+
"learning_rate": 2.9649866664483385e-06,
|
8863 |
+
"loss": 0.702,
|
8864 |
+
"step": 1265
|
8865 |
+
},
|
8866 |
+
{
|
8867 |
+
"epoch": 0.34500613162556204,
|
8868 |
+
"grad_norm": 0.23583833873271942,
|
8869 |
+
"learning_rate": 2.9274700319796065e-06,
|
8870 |
+
"loss": 0.6844,
|
8871 |
+
"step": 1266
|
8872 |
+
},
|
8873 |
+
{
|
8874 |
+
"epoch": 0.34527864831720945,
|
8875 |
+
"grad_norm": 0.2521907389163971,
|
8876 |
+
"learning_rate": 2.8901851052828e-06,
|
8877 |
+
"loss": 0.744,
|
8878 |
+
"step": 1267
|
8879 |
+
},
|
8880 |
+
{
|
8881 |
+
"epoch": 0.3455511650088568,
|
8882 |
+
"grad_norm": 0.3106304407119751,
|
8883 |
+
"learning_rate": 2.853132069887643e-06,
|
8884 |
+
"loss": 0.9315,
|
8885 |
+
"step": 1268
|
8886 |
+
},
|
8887 |
+
{
|
8888 |
+
"epoch": 0.34582368170050415,
|
8889 |
+
"grad_norm": 0.26413318514823914,
|
8890 |
+
"learning_rate": 2.816311108182368e-06,
|
8891 |
+
"loss": 0.8991,
|
8892 |
+
"step": 1269
|
8893 |
+
},
|
8894 |
+
{
|
8895 |
+
"epoch": 0.3460961983921515,
|
8896 |
+
"grad_norm": 0.25055551528930664,
|
8897 |
+
"learning_rate": 2.779722401412871e-06,
|
8898 |
+
"loss": 0.7858,
|
8899 |
+
"step": 1270
|
8900 |
+
},
|
8901 |
+
{
|
8902 |
+
"epoch": 0.3463687150837989,
|
8903 |
+
"grad_norm": 0.31429582834243774,
|
8904 |
+
"learning_rate": 2.7433661296818235e-06,
|
8905 |
+
"loss": 0.7714,
|
8906 |
+
"step": 1271
|
8907 |
+
},
|
8908 |
+
{
|
8909 |
+
"epoch": 0.34664123177544626,
|
8910 |
+
"grad_norm": 0.2546524703502655,
|
8911 |
+
"learning_rate": 2.707242471947746e-06,
|
8912 |
+
"loss": 0.8173,
|
8913 |
+
"step": 1272
|
8914 |
+
},
|
8915 |
+
{
|
8916 |
+
"epoch": 0.3469137484670936,
|
8917 |
+
"grad_norm": 0.2713319957256317,
|
8918 |
+
"learning_rate": 2.6713516060241527e-06,
|
8919 |
+
"loss": 0.7835,
|
8920 |
+
"step": 1273
|
8921 |
+
},
|
8922 |
+
{
|
8923 |
+
"epoch": 0.34718626515874096,
|
8924 |
+
"grad_norm": 0.31074321269989014,
|
8925 |
+
"learning_rate": 2.635693708578696e-06,
|
8926 |
+
"loss": 0.8184,
|
8927 |
+
"step": 1274
|
8928 |
+
},
|
8929 |
+
{
|
8930 |
+
"epoch": 0.3474587818503883,
|
8931 |
+
"grad_norm": 0.2898237407207489,
|
8932 |
+
"learning_rate": 2.60026895513224e-06,
|
8933 |
+
"loss": 0.7461,
|
8934 |
+
"step": 1275
|
8935 |
+
},
|
8936 |
+
{
|
8937 |
+
"epoch": 0.3477312985420357,
|
8938 |
+
"grad_norm": 0.25462061166763306,
|
8939 |
+
"learning_rate": 2.5650775200580624e-06,
|
8940 |
+
"loss": 0.7655,
|
8941 |
+
"step": 1276
|
8942 |
+
},
|
8943 |
+
{
|
8944 |
+
"epoch": 0.34800381523368307,
|
8945 |
+
"grad_norm": 0.3492688834667206,
|
8946 |
+
"learning_rate": 2.530119576580936e-06,
|
8947 |
+
"loss": 0.8651,
|
8948 |
+
"step": 1277
|
8949 |
+
},
|
8950 |
+
{
|
8951 |
+
"epoch": 0.3482763319253304,
|
8952 |
+
"grad_norm": 0.32055115699768066,
|
8953 |
+
"learning_rate": 2.4953952967763317e-06,
|
8954 |
+
"loss": 0.8433,
|
8955 |
+
"step": 1278
|
8956 |
+
},
|
8957 |
+
{
|
8958 |
+
"epoch": 0.34854884861697777,
|
8959 |
+
"grad_norm": 0.34110864996910095,
|
8960 |
+
"learning_rate": 2.4609048515695343e-06,
|
8961 |
+
"loss": 0.886,
|
8962 |
+
"step": 1279
|
8963 |
+
},
|
8964 |
+
{
|
8965 |
+
"epoch": 0.3488213653086252,
|
8966 |
+
"grad_norm": 0.302672803401947,
|
8967 |
+
"learning_rate": 2.426648410734794e-06,
|
8968 |
+
"loss": 0.6645,
|
8969 |
+
"step": 1280
|
8970 |
+
},
|
8971 |
+
{
|
8972 |
+
"epoch": 0.3490938820002725,
|
8973 |
+
"grad_norm": 0.36506494879722595,
|
8974 |
+
"learning_rate": 2.3926261428945383e-06,
|
8975 |
+
"loss": 0.6815,
|
8976 |
+
"step": 1281
|
8977 |
+
},
|
8978 |
+
{
|
8979 |
+
"epoch": 0.3493663986919199,
|
8980 |
+
"grad_norm": 0.3430880904197693,
|
8981 |
+
"learning_rate": 2.3588382155184998e-06,
|
8982 |
+
"loss": 0.8067,
|
8983 |
+
"step": 1282
|
8984 |
+
},
|
8985 |
+
{
|
8986 |
+
"epoch": 0.34963891538356723,
|
8987 |
+
"grad_norm": 0.3808124363422394,
|
8988 |
+
"learning_rate": 2.3252847949228827e-06,
|
8989 |
+
"loss": 0.9903,
|
8990 |
+
"step": 1283
|
8991 |
+
},
|
8992 |
+
{
|
8993 |
+
"epoch": 0.34991143207521463,
|
8994 |
+
"grad_norm": 0.40509849786758423,
|
8995 |
+
"learning_rate": 2.2919660462696056e-06,
|
8996 |
+
"loss": 0.8571,
|
8997 |
+
"step": 1284
|
8998 |
+
},
|
8999 |
+
{
|
9000 |
+
"epoch": 0.350183948766862,
|
9001 |
+
"grad_norm": 0.45389968156814575,
|
9002 |
+
"learning_rate": 2.258882133565404e-06,
|
9003 |
+
"loss": 0.8553,
|
9004 |
+
"step": 1285
|
9005 |
+
},
|
9006 |
+
{
|
9007 |
+
"epoch": 0.35045646545850934,
|
9008 |
+
"grad_norm": 0.42130762338638306,
|
9009 |
+
"learning_rate": 2.2260332196611e-06,
|
9010 |
+
"loss": 0.8894,
|
9011 |
+
"step": 1286
|
9012 |
+
},
|
9013 |
+
{
|
9014 |
+
"epoch": 0.3507289821501567,
|
9015 |
+
"grad_norm": 0.4185850918292999,
|
9016 |
+
"learning_rate": 2.1934194662507736e-06,
|
9017 |
+
"loss": 0.7963,
|
9018 |
+
"step": 1287
|
9019 |
+
},
|
9020 |
+
{
|
9021 |
+
"epoch": 0.35100149884180404,
|
9022 |
+
"grad_norm": 0.49727383255958557,
|
9023 |
+
"learning_rate": 2.161041033870914e-06,
|
9024 |
+
"loss": 0.9345,
|
9025 |
+
"step": 1288
|
9026 |
+
},
|
9027 |
+
{
|
9028 |
+
"epoch": 0.35127401553345144,
|
9029 |
+
"grad_norm": 0.47523200511932373,
|
9030 |
+
"learning_rate": 2.1288980818997275e-06,
|
9031 |
+
"loss": 0.8097,
|
9032 |
+
"step": 1289
|
9033 |
+
},
|
9034 |
+
{
|
9035 |
+
"epoch": 0.3515465322250988,
|
9036 |
+
"grad_norm": 0.5790581107139587,
|
9037 |
+
"learning_rate": 2.0969907685562783e-06,
|
9038 |
+
"loss": 0.9161,
|
9039 |
+
"step": 1290
|
9040 |
+
},
|
9041 |
+
{
|
9042 |
+
"epoch": 0.35181904891674615,
|
9043 |
+
"grad_norm": 0.6227943897247314,
|
9044 |
+
"learning_rate": 2.0653192508997223e-06,
|
9045 |
+
"loss": 0.9568,
|
9046 |
+
"step": 1291
|
9047 |
+
},
|
9048 |
+
{
|
9049 |
+
"epoch": 0.3520915656083935,
|
9050 |
+
"grad_norm": 0.6085624098777771,
|
9051 |
+
"learning_rate": 2.03388368482858e-06,
|
9052 |
+
"loss": 0.82,
|
9053 |
+
"step": 1292
|
9054 |
+
},
|
9055 |
+
{
|
9056 |
+
"epoch": 0.3523640823000409,
|
9057 |
+
"grad_norm": 0.6576049327850342,
|
9058 |
+
"learning_rate": 2.0026842250799038e-06,
|
9059 |
+
"loss": 0.8373,
|
9060 |
+
"step": 1293
|
9061 |
+
},
|
9062 |
+
{
|
9063 |
+
"epoch": 0.35263659899168825,
|
9064 |
+
"grad_norm": 0.5606847405433655,
|
9065 |
+
"learning_rate": 1.971721025228551e-06,
|
9066 |
+
"loss": 0.8869,
|
9067 |
+
"step": 1294
|
9068 |
+
},
|
9069 |
+
{
|
9070 |
+
"epoch": 0.3529091156833356,
|
9071 |
+
"grad_norm": 0.6223573684692383,
|
9072 |
+
"learning_rate": 1.9409942376864333e-06,
|
9073 |
+
"loss": 0.8014,
|
9074 |
+
"step": 1295
|
9075 |
+
},
|
9076 |
+
{
|
9077 |
+
"epoch": 0.35318163237498296,
|
9078 |
+
"grad_norm": 0.6554602980613708,
|
9079 |
+
"learning_rate": 1.910504013701747e-06,
|
9080 |
+
"loss": 0.9007,
|
9081 |
+
"step": 1296
|
9082 |
+
},
|
9083 |
+
{
|
9084 |
+
"epoch": 0.3534541490666303,
|
9085 |
+
"grad_norm": 0.7746028304100037,
|
9086 |
+
"learning_rate": 1.8802505033582608e-06,
|
9087 |
+
"loss": 0.8366,
|
9088 |
+
"step": 1297
|
9089 |
+
},
|
9090 |
+
{
|
9091 |
+
"epoch": 0.3537266657582777,
|
9092 |
+
"grad_norm": 0.9623523950576782,
|
9093 |
+
"learning_rate": 1.8502338555745124e-06,
|
9094 |
+
"loss": 0.8232,
|
9095 |
+
"step": 1298
|
9096 |
+
},
|
9097 |
+
{
|
9098 |
+
"epoch": 0.35399918244992506,
|
9099 |
+
"grad_norm": 1.0030081272125244,
|
9100 |
+
"learning_rate": 1.8204542181031569e-06,
|
9101 |
+
"loss": 1.0403,
|
9102 |
+
"step": 1299
|
9103 |
+
},
|
9104 |
+
{
|
9105 |
+
"epoch": 0.3542716991415724,
|
9106 |
+
"grad_norm": 1.3541580438613892,
|
9107 |
+
"learning_rate": 1.790911737530182e-06,
|
9108 |
+
"loss": 0.7661,
|
9109 |
+
"step": 1300
|
9110 |
+
},
|
9111 |
+
{
|
9112 |
+
"epoch": 0.35454421583321977,
|
9113 |
+
"grad_norm": 0.20087184011936188,
|
9114 |
+
"learning_rate": 1.7616065592742038e-06,
|
9115 |
+
"loss": 0.6685,
|
9116 |
+
"step": 1301
|
9117 |
+
},
|
9118 |
+
{
|
9119 |
+
"epoch": 0.3548167325248672,
|
9120 |
+
"grad_norm": 0.26015710830688477,
|
9121 |
+
"learning_rate": 1.7325388275857612e-06,
|
9122 |
+
"loss": 0.6228,
|
9123 |
+
"step": 1302
|
9124 |
+
},
|
9125 |
+
{
|
9126 |
+
"epoch": 0.3550892492165145,
|
9127 |
+
"grad_norm": 0.20169168710708618,
|
9128 |
+
"learning_rate": 1.70370868554659e-06,
|
9129 |
+
"loss": 0.6263,
|
9130 |
+
"step": 1303
|
9131 |
+
},
|
9132 |
+
{
|
9133 |
+
"epoch": 0.3553617659081619,
|
9134 |
+
"grad_norm": 0.16958947479724884,
|
9135 |
+
"learning_rate": 1.6751162750689164e-06,
|
9136 |
+
"loss": 0.5615,
|
9137 |
+
"step": 1304
|
9138 |
+
},
|
9139 |
+
{
|
9140 |
+
"epoch": 0.3556342825998092,
|
9141 |
+
"grad_norm": 0.1827636957168579,
|
9142 |
+
"learning_rate": 1.6467617368947918e-06,
|
9143 |
+
"loss": 0.6226,
|
9144 |
+
"step": 1305
|
9145 |
+
},
|
9146 |
+
{
|
9147 |
+
"epoch": 0.3559067992914566,
|
9148 |
+
"grad_norm": 0.17883753776550293,
|
9149 |
+
"learning_rate": 1.6186452105953597e-06,
|
9150 |
+
"loss": 0.543,
|
9151 |
+
"step": 1306
|
9152 |
+
},
|
9153 |
+
{
|
9154 |
+
"epoch": 0.356179315983104,
|
9155 |
+
"grad_norm": 0.22228962182998657,
|
9156 |
+
"learning_rate": 1.590766834570173e-06,
|
9157 |
+
"loss": 0.671,
|
9158 |
+
"step": 1307
|
9159 |
+
},
|
9160 |
+
{
|
9161 |
+
"epoch": 0.35645183267475133,
|
9162 |
+
"grad_norm": 0.18305955827236176,
|
9163 |
+
"learning_rate": 1.5631267460465393e-06,
|
9164 |
+
"loss": 0.5974,
|
9165 |
+
"step": 1308
|
9166 |
+
},
|
9167 |
+
{
|
9168 |
+
"epoch": 0.3567243493663987,
|
9169 |
+
"grad_norm": 0.20763877034187317,
|
9170 |
+
"learning_rate": 1.5357250810788314e-06,
|
9171 |
+
"loss": 0.696,
|
9172 |
+
"step": 1309
|
9173 |
+
},
|
9174 |
+
{
|
9175 |
+
"epoch": 0.35699686605804604,
|
9176 |
+
"grad_norm": 0.19930560886859894,
|
9177 |
+
"learning_rate": 1.508561974547812e-06,
|
9178 |
+
"loss": 0.7149,
|
9179 |
+
"step": 1310
|
9180 |
+
},
|
9181 |
+
{
|
9182 |
+
"epoch": 0.35726938274969344,
|
9183 |
+
"grad_norm": 0.21154768764972687,
|
9184 |
+
"learning_rate": 1.4816375601599652e-06,
|
9185 |
+
"loss": 0.7226,
|
9186 |
+
"step": 1311
|
9187 |
+
},
|
9188 |
+
{
|
9189 |
+
"epoch": 0.3575418994413408,
|
9190 |
+
"grad_norm": 0.25318780541419983,
|
9191 |
+
"learning_rate": 1.4549519704468718e-06,
|
9192 |
+
"loss": 0.7079,
|
9193 |
+
"step": 1312
|
9194 |
+
},
|
9195 |
+
{
|
9196 |
+
"epoch": 0.35781441613298814,
|
9197 |
+
"grad_norm": 0.24299569427967072,
|
9198 |
+
"learning_rate": 1.4285053367645074e-06,
|
9199 |
+
"loss": 0.778,
|
9200 |
+
"step": 1313
|
9201 |
+
},
|
9202 |
+
{
|
9203 |
+
"epoch": 0.3580869328246355,
|
9204 |
+
"grad_norm": 0.22785383462905884,
|
9205 |
+
"learning_rate": 1.4022977892926226e-06,
|
9206 |
+
"loss": 0.7161,
|
9207 |
+
"step": 1314
|
9208 |
+
},
|
9209 |
+
{
|
9210 |
+
"epoch": 0.35835944951628285,
|
9211 |
+
"grad_norm": 0.27268078923225403,
|
9212 |
+
"learning_rate": 1.37632945703412e-06,
|
9213 |
+
"loss": 0.7929,
|
9214 |
+
"step": 1315
|
9215 |
+
},
|
9216 |
+
{
|
9217 |
+
"epoch": 0.35863196620793025,
|
9218 |
+
"grad_norm": 0.23317265510559082,
|
9219 |
+
"learning_rate": 1.3506004678143836e-06,
|
9220 |
+
"loss": 0.6683,
|
9221 |
+
"step": 1316
|
9222 |
+
},
|
9223 |
+
{
|
9224 |
+
"epoch": 0.3589044828995776,
|
9225 |
+
"grad_norm": 0.28031599521636963,
|
9226 |
+
"learning_rate": 1.3251109482806666e-06,
|
9227 |
+
"loss": 0.7558,
|
9228 |
+
"step": 1317
|
9229 |
+
},
|
9230 |
+
{
|
9231 |
+
"epoch": 0.35917699959122495,
|
9232 |
+
"grad_norm": 0.2568948268890381,
|
9233 |
+
"learning_rate": 1.2998610239014775e-06,
|
9234 |
+
"loss": 0.6871,
|
9235 |
+
"step": 1318
|
9236 |
+
},
|
9237 |
+
{
|
9238 |
+
"epoch": 0.3594495162828723,
|
9239 |
+
"grad_norm": 0.25137385725975037,
|
9240 |
+
"learning_rate": 1.2748508189659447e-06,
|
9241 |
+
"loss": 0.7247,
|
9242 |
+
"step": 1319
|
9243 |
+
},
|
9244 |
+
{
|
9245 |
+
"epoch": 0.3597220329745197,
|
9246 |
+
"grad_norm": 0.273794561624527,
|
9247 |
+
"learning_rate": 1.2500804565832025e-06,
|
9248 |
+
"loss": 0.7919,
|
9249 |
+
"step": 1320
|
9250 |
+
},
|
9251 |
+
{
|
9252 |
+
"epoch": 0.35999454966616706,
|
9253 |
+
"grad_norm": 0.2582818865776062,
|
9254 |
+
"learning_rate": 1.2255500586818014e-06,
|
9255 |
+
"loss": 0.7999,
|
9256 |
+
"step": 1321
|
9257 |
+
},
|
9258 |
+
{
|
9259 |
+
"epoch": 0.3602670663578144,
|
9260 |
+
"grad_norm": 0.2675579786300659,
|
9261 |
+
"learning_rate": 1.2012597460091201e-06,
|
9262 |
+
"loss": 0.706,
|
9263 |
+
"step": 1322
|
9264 |
+
},
|
9265 |
+
{
|
9266 |
+
"epoch": 0.36053958304946176,
|
9267 |
+
"grad_norm": 0.28468015789985657,
|
9268 |
+
"learning_rate": 1.177209638130733e-06,
|
9269 |
+
"loss": 0.7571,
|
9270 |
+
"step": 1323
|
9271 |
+
},
|
9272 |
+
{
|
9273 |
+
"epoch": 0.3608120997411091,
|
9274 |
+
"grad_norm": 0.26469913125038147,
|
9275 |
+
"learning_rate": 1.1533998534298263e-06,
|
9276 |
+
"loss": 0.7295,
|
9277 |
+
"step": 1324
|
9278 |
+
},
|
9279 |
+
{
|
9280 |
+
"epoch": 0.3610846164327565,
|
9281 |
+
"grad_norm": 0.31171098351478577,
|
9282 |
+
"learning_rate": 1.1298305091066664e-06,
|
9283 |
+
"loss": 0.8374,
|
9284 |
+
"step": 1325
|
9285 |
+
},
|
9286 |
+
{
|
9287 |
+
"epoch": 0.3613571331244039,
|
9288 |
+
"grad_norm": 0.29910188913345337,
|
9289 |
+
"learning_rate": 1.106501721177966e-06,
|
9290 |
+
"loss": 0.8056,
|
9291 |
+
"step": 1326
|
9292 |
+
},
|
9293 |
+
{
|
9294 |
+
"epoch": 0.3616296498160512,
|
9295 |
+
"grad_norm": 0.3416571617126465,
|
9296 |
+
"learning_rate": 1.0834136044763187e-06,
|
9297 |
+
"loss": 0.8173,
|
9298 |
+
"step": 1327
|
9299 |
+
},
|
9300 |
+
{
|
9301 |
+
"epoch": 0.3619021665076986,
|
9302 |
+
"grad_norm": 0.3243512809276581,
|
9303 |
+
"learning_rate": 1.060566272649688e-06,
|
9304 |
+
"loss": 0.8004,
|
9305 |
+
"step": 1328
|
9306 |
+
},
|
9307 |
+
{
|
9308 |
+
"epoch": 0.362174683199346,
|
9309 |
+
"grad_norm": 0.31588006019592285,
|
9310 |
+
"learning_rate": 1.037959838160768e-06,
|
9311 |
+
"loss": 0.7625,
|
9312 |
+
"step": 1329
|
9313 |
+
},
|
9314 |
+
{
|
9315 |
+
"epoch": 0.36244719989099333,
|
9316 |
+
"grad_norm": 0.3250928819179535,
|
9317 |
+
"learning_rate": 1.0155944122864912e-06,
|
9318 |
+
"loss": 0.8438,
|
9319 |
+
"step": 1330
|
9320 |
+
},
|
9321 |
+
{
|
9322 |
+
"epoch": 0.3627197165826407,
|
9323 |
+
"grad_norm": 0.38825666904449463,
|
9324 |
+
"learning_rate": 9.93470105117461e-07,
|
9325 |
+
"loss": 0.8933,
|
9326 |
+
"step": 1331
|
9327 |
+
},
|
9328 |
+
{
|
9329 |
+
"epoch": 0.36299223327428803,
|
9330 |
+
"grad_norm": 0.33647429943084717,
|
9331 |
+
"learning_rate": 9.715870255573966e-07,
|
9332 |
+
"loss": 0.8995,
|
9333 |
+
"step": 1332
|
9334 |
+
},
|
9335 |
+
{
|
9336 |
+
"epoch": 0.36326474996593544,
|
9337 |
+
"grad_norm": 0.4018394351005554,
|
9338 |
+
"learning_rate": 9.499452813226284e-07,
|
9339 |
+
"loss": 0.9333,
|
9340 |
+
"step": 1333
|
9341 |
+
},
|
9342 |
+
{
|
9343 |
+
"epoch": 0.3635372666575828,
|
9344 |
+
"grad_norm": 0.36348339915275574,
|
9345 |
+
"learning_rate": 9.285449789415146e-07,
|
9346 |
+
"loss": 0.7715,
|
9347 |
+
"step": 1334
|
9348 |
+
},
|
9349 |
+
{
|
9350 |
+
"epoch": 0.36380978334923014,
|
9351 |
+
"grad_norm": 0.4206405282020569,
|
9352 |
+
"learning_rate": 9.073862237539976e-07,
|
9353 |
+
"loss": 0.7128,
|
9354 |
+
"step": 1335
|
9355 |
+
},
|
9356 |
+
{
|
9357 |
+
"epoch": 0.3640823000408775,
|
9358 |
+
"grad_norm": 0.4206981062889099,
|
9359 |
+
"learning_rate": 8.864691199109931e-07,
|
9360 |
+
"loss": 0.8466,
|
9361 |
+
"step": 1336
|
9362 |
+
},
|
9363 |
+
{
|
9364 |
+
"epoch": 0.36435481673252484,
|
9365 |
+
"grad_norm": 0.4286057949066162,
|
9366 |
+
"learning_rate": 8.657937703739516e-07,
|
9367 |
+
"loss": 0.9691,
|
9368 |
+
"step": 1337
|
9369 |
+
},
|
9370 |
+
{
|
9371 |
+
"epoch": 0.36462733342417225,
|
9372 |
+
"grad_norm": 0.5464884638786316,
|
9373 |
+
"learning_rate": 8.453602769143142e-07,
|
9374 |
+
"loss": 0.894,
|
9375 |
+
"step": 1338
|
9376 |
+
},
|
9377 |
+
{
|
9378 |
+
"epoch": 0.3648998501158196,
|
9379 |
+
"grad_norm": 0.514959454536438,
|
9380 |
+
"learning_rate": 8.251687401130137e-07,
|
9381 |
+
"loss": 0.833,
|
9382 |
+
"step": 1339
|
9383 |
+
},
|
9384 |
+
{
|
9385 |
+
"epoch": 0.36517236680746695,
|
9386 |
+
"grad_norm": 0.49453631043434143,
|
9387 |
+
"learning_rate": 8.052192593599905e-07,
|
9388 |
+
"loss": 0.8971,
|
9389 |
+
"step": 1340
|
9390 |
+
},
|
9391 |
+
{
|
9392 |
+
"epoch": 0.3654448834991143,
|
9393 |
+
"grad_norm": 0.5220718383789062,
|
9394 |
+
"learning_rate": 7.855119328537109e-07,
|
9395 |
+
"loss": 0.9248,
|
9396 |
+
"step": 1341
|
9397 |
+
},
|
9398 |
+
{
|
9399 |
+
"epoch": 0.3657174001907617,
|
9400 |
+
"grad_norm": 0.607807993888855,
|
9401 |
+
"learning_rate": 7.660468576006441e-07,
|
9402 |
+
"loss": 0.9675,
|
9403 |
+
"step": 1342
|
9404 |
+
},
|
9405 |
+
{
|
9406 |
+
"epoch": 0.36598991688240906,
|
9407 |
+
"grad_norm": 0.7026263475418091,
|
9408 |
+
"learning_rate": 7.46824129414847e-07,
|
9409 |
+
"loss": 1.01,
|
9410 |
+
"step": 1343
|
9411 |
+
},
|
9412 |
+
{
|
9413 |
+
"epoch": 0.3662624335740564,
|
9414 |
+
"grad_norm": 0.7926869988441467,
|
9415 |
+
"learning_rate": 7.278438429174305e-07,
|
9416 |
+
"loss": 1.0387,
|
9417 |
+
"step": 1344
|
9418 |
+
},
|
9419 |
+
{
|
9420 |
+
"epoch": 0.36653495026570376,
|
9421 |
+
"grad_norm": 0.8616685271263123,
|
9422 |
+
"learning_rate": 7.091060915361269e-07,
|
9423 |
+
"loss": 0.901,
|
9424 |
+
"step": 1345
|
9425 |
+
},
|
9426 |
+
{
|
9427 |
+
"epoch": 0.3668074669573511,
|
9428 |
+
"grad_norm": 0.8358527421951294,
|
9429 |
+
"learning_rate": 6.906109675048345e-07,
|
9430 |
+
"loss": 1.0807,
|
9431 |
+
"step": 1346
|
9432 |
+
},
|
9433 |
+
{
|
9434 |
+
"epoch": 0.3670799836489985,
|
9435 |
+
"grad_norm": 1.00869882106781,
|
9436 |
+
"learning_rate": 6.723585618631456e-07,
|
9437 |
+
"loss": 1.0008,
|
9438 |
+
"step": 1347
|
9439 |
+
},
|
9440 |
+
{
|
9441 |
+
"epoch": 0.36735250034064587,
|
9442 |
+
"grad_norm": 0.9799751043319702,
|
9443 |
+
"learning_rate": 6.543489644559087e-07,
|
9444 |
+
"loss": 1.0247,
|
9445 |
+
"step": 1348
|
9446 |
+
},
|
9447 |
+
{
|
9448 |
+
"epoch": 0.3676250170322932,
|
9449 |
+
"grad_norm": 1.0821982622146606,
|
9450 |
+
"learning_rate": 6.365822639327723e-07,
|
9451 |
+
"loss": 0.9494,
|
9452 |
+
"step": 1349
|
9453 |
+
},
|
9454 |
+
{
|
9455 |
+
"epoch": 0.3678975337239406,
|
9456 |
+
"grad_norm": 1.4081162214279175,
|
9457 |
+
"learning_rate": 6.190585477477806e-07,
|
9458 |
+
"loss": 1.015,
|
9459 |
+
"step": 1350
|
9460 |
+
},
|
9461 |
+
{
|
9462 |
+
"epoch": 0.368170050415588,
|
9463 |
+
"grad_norm": 0.15459007024765015,
|
9464 |
+
"learning_rate": 6.017779021589065e-07,
|
9465 |
+
"loss": 0.334,
|
9466 |
+
"step": 1351
|
9467 |
+
},
|
9468 |
+
{
|
9469 |
+
"epoch": 0.36844256710723533,
|
9470 |
+
"grad_norm": 0.18123799562454224,
|
9471 |
+
"learning_rate": 5.847404122276412e-07,
|
9472 |
+
"loss": 0.5546,
|
9473 |
+
"step": 1352
|
9474 |
+
},
|
9475 |
+
{
|
9476 |
+
"epoch": 0.3687150837988827,
|
9477 |
+
"grad_norm": 0.2544018030166626,
|
9478 |
+
"learning_rate": 5.679461618185944e-07,
|
9479 |
+
"loss": 0.6895,
|
9480 |
+
"step": 1353
|
9481 |
+
},
|
9482 |
+
{
|
9483 |
+
"epoch": 0.36898760049053003,
|
9484 |
+
"grad_norm": 0.19583937525749207,
|
9485 |
+
"learning_rate": 5.513952335990502e-07,
|
9486 |
+
"loss": 0.6937,
|
9487 |
+
"step": 1354
|
9488 |
+
},
|
9489 |
+
{
|
9490 |
+
"epoch": 0.3692601171821774,
|
9491 |
+
"grad_norm": 0.24667081236839294,
|
9492 |
+
"learning_rate": 5.350877090385731e-07,
|
9493 |
+
"loss": 0.788,
|
9494 |
+
"step": 1355
|
9495 |
+
},
|
9496 |
+
{
|
9497 |
+
"epoch": 0.3695326338738248,
|
9498 |
+
"grad_norm": 0.1726841926574707,
|
9499 |
+
"learning_rate": 5.190236684086136e-07,
|
9500 |
+
"loss": 0.5971,
|
9501 |
+
"step": 1356
|
9502 |
+
},
|
9503 |
+
{
|
9504 |
+
"epoch": 0.36980515056547214,
|
9505 |
+
"grad_norm": 0.1916254162788391,
|
9506 |
+
"learning_rate": 5.032031907821089e-07,
|
9507 |
+
"loss": 0.544,
|
9508 |
+
"step": 1357
|
9509 |
+
},
|
9510 |
+
{
|
9511 |
+
"epoch": 0.3700776672571195,
|
9512 |
+
"grad_norm": 0.16045045852661133,
|
9513 |
+
"learning_rate": 4.876263540330828e-07,
|
9514 |
+
"loss": 0.5502,
|
9515 |
+
"step": 1358
|
9516 |
+
},
|
9517 |
+
{
|
9518 |
+
"epoch": 0.37035018394876684,
|
9519 |
+
"grad_norm": 0.19948673248291016,
|
9520 |
+
"learning_rate": 4.722932348362852e-07,
|
9521 |
+
"loss": 0.6865,
|
9522 |
+
"step": 1359
|
9523 |
+
},
|
9524 |
+
{
|
9525 |
+
"epoch": 0.37062270064041425,
|
9526 |
+
"grad_norm": 0.21819989383220673,
|
9527 |
+
"learning_rate": 4.572039086667923e-07,
|
9528 |
+
"loss": 0.8359,
|
9529 |
+
"step": 1360
|
9530 |
+
},
|
9531 |
+
{
|
9532 |
+
"epoch": 0.3708952173320616,
|
9533 |
+
"grad_norm": 0.21285250782966614,
|
9534 |
+
"learning_rate": 4.423584497996458e-07,
|
9535 |
+
"loss": 0.6576,
|
9536 |
+
"step": 1361
|
9537 |
+
},
|
9538 |
+
{
|
9539 |
+
"epoch": 0.37116773402370895,
|
9540 |
+
"grad_norm": 0.235045924782753,
|
9541 |
+
"learning_rate": 4.277569313094809e-07,
|
9542 |
+
"loss": 0.7278,
|
9543 |
+
"step": 1362
|
9544 |
+
},
|
9545 |
+
{
|
9546 |
+
"epoch": 0.3714402507153563,
|
9547 |
+
"grad_norm": 0.2244848608970642,
|
9548 |
+
"learning_rate": 4.1339942507018225e-07,
|
9549 |
+
"loss": 0.7205,
|
9550 |
+
"step": 1363
|
9551 |
+
},
|
9552 |
+
{
|
9553 |
+
"epoch": 0.37171276740700365,
|
9554 |
+
"grad_norm": 0.22510984539985657,
|
9555 |
+
"learning_rate": 3.9928600175451193e-07,
|
9556 |
+
"loss": 0.758,
|
9557 |
+
"step": 1364
|
9558 |
+
},
|
9559 |
+
{
|
9560 |
+
"epoch": 0.37198528409865106,
|
9561 |
+
"grad_norm": 0.2502443194389343,
|
9562 |
+
"learning_rate": 3.854167308337708e-07,
|
9563 |
+
"loss": 0.7937,
|
9564 |
+
"step": 1365
|
9565 |
+
},
|
9566 |
+
{
|
9567 |
+
"epoch": 0.3722578007902984,
|
9568 |
+
"grad_norm": 0.2651815414428711,
|
9569 |
+
"learning_rate": 3.717916805774602e-07,
|
9570 |
+
"loss": 0.7462,
|
9571 |
+
"step": 1366
|
9572 |
+
},
|
9573 |
+
{
|
9574 |
+
"epoch": 0.37253031748194576,
|
9575 |
+
"grad_norm": 0.2717399001121521,
|
9576 |
+
"learning_rate": 3.584109180529205e-07,
|
9577 |
+
"loss": 0.8509,
|
9578 |
+
"step": 1367
|
9579 |
+
},
|
9580 |
+
{
|
9581 |
+
"epoch": 0.3728028341735931,
|
9582 |
+
"grad_norm": 0.2546311616897583,
|
9583 |
+
"learning_rate": 3.452745091250431e-07,
|
9584 |
+
"loss": 0.8024,
|
9585 |
+
"step": 1368
|
9586 |
+
},
|
9587 |
+
{
|
9588 |
+
"epoch": 0.3730753508652405,
|
9589 |
+
"grad_norm": 0.26162171363830566,
|
9590 |
+
"learning_rate": 3.323825184559204e-07,
|
9591 |
+
"loss": 0.8083,
|
9592 |
+
"step": 1369
|
9593 |
+
},
|
9594 |
+
{
|
9595 |
+
"epoch": 0.37334786755688787,
|
9596 |
+
"grad_norm": 0.21666046977043152,
|
9597 |
+
"learning_rate": 3.1973500950451265e-07,
|
9598 |
+
"loss": 0.6353,
|
9599 |
+
"step": 1370
|
9600 |
+
},
|
9601 |
+
{
|
9602 |
+
"epoch": 0.3736203842485352,
|
9603 |
+
"grad_norm": 0.2568245530128479,
|
9604 |
+
"learning_rate": 3.0733204452638163e-07,
|
9605 |
+
"loss": 0.7984,
|
9606 |
+
"step": 1371
|
9607 |
+
},
|
9608 |
+
{
|
9609 |
+
"epoch": 0.37389290094018257,
|
9610 |
+
"grad_norm": 0.2753666043281555,
|
9611 |
+
"learning_rate": 2.9517368457332996e-07,
|
9612 |
+
"loss": 0.77,
|
9613 |
+
"step": 1372
|
9614 |
+
},
|
9615 |
+
{
|
9616 |
+
"epoch": 0.3741654176318299,
|
9617 |
+
"grad_norm": 0.2763117849826813,
|
9618 |
+
"learning_rate": 2.8325998949314536e-07,
|
9619 |
+
"loss": 0.7601,
|
9620 |
+
"step": 1373
|
9621 |
+
},
|
9622 |
+
{
|
9623 |
+
"epoch": 0.37443793432347733,
|
9624 |
+
"grad_norm": 0.2768275737762451,
|
9625 |
+
"learning_rate": 2.715910179292791e-07,
|
9626 |
+
"loss": 0.7987,
|
9627 |
+
"step": 1374
|
9628 |
+
},
|
9629 |
+
{
|
9630 |
+
"epoch": 0.3747104510151247,
|
9631 |
+
"grad_norm": 0.28381019830703735,
|
9632 |
+
"learning_rate": 2.601668273205737e-07,
|
9633 |
+
"loss": 0.784,
|
9634 |
+
"step": 1375
|
9635 |
+
},
|
9636 |
+
{
|
9637 |
+
"epoch": 0.37498296770677203,
|
9638 |
+
"grad_norm": 0.3076533377170563,
|
9639 |
+
"learning_rate": 2.4898747390095787e-07,
|
9640 |
+
"loss": 0.7481,
|
9641 |
+
"step": 1376
|
9642 |
+
},
|
9643 |
+
{
|
9644 |
+
"epoch": 0.3752554843984194,
|
9645 |
+
"grad_norm": 0.30070942640304565,
|
9646 |
+
"learning_rate": 2.3805301269920755e-07,
|
9647 |
+
"loss": 0.7292,
|
9648 |
+
"step": 1377
|
9649 |
+
},
|
9650 |
+
{
|
9651 |
+
"epoch": 0.3755280010900668,
|
9652 |
+
"grad_norm": 0.28088441491127014,
|
9653 |
+
"learning_rate": 2.2736349753862963e-07,
|
9654 |
+
"loss": 0.8749,
|
9655 |
+
"step": 1378
|
9656 |
+
},
|
9657 |
+
{
|
9658 |
+
"epoch": 0.37580051778171414,
|
9659 |
+
"grad_norm": 0.3044433891773224,
|
9660 |
+
"learning_rate": 2.1691898103682883e-07,
|
9661 |
+
"loss": 0.7314,
|
9662 |
+
"step": 1379
|
9663 |
+
},
|
9664 |
+
{
|
9665 |
+
"epoch": 0.3760730344733615,
|
9666 |
+
"grad_norm": 0.31092631816864014,
|
9667 |
+
"learning_rate": 2.0671951460544126e-07,
|
9668 |
+
"loss": 0.871,
|
9669 |
+
"step": 1380
|
9670 |
+
},
|
9671 |
+
{
|
9672 |
+
"epoch": 0.37634555116500884,
|
9673 |
+
"grad_norm": 0.3396849036216736,
|
9674 |
+
"learning_rate": 1.9676514844987337e-07,
|
9675 |
+
"loss": 0.852,
|
9676 |
+
"step": 1381
|
9677 |
+
},
|
9678 |
+
{
|
9679 |
+
"epoch": 0.37661806785665625,
|
9680 |
+
"grad_norm": 0.304912269115448,
|
9681 |
+
"learning_rate": 1.8705593156906343e-07,
|
9682 |
+
"loss": 0.8278,
|
9683 |
+
"step": 1382
|
9684 |
+
},
|
9685 |
+
{
|
9686 |
+
"epoch": 0.3768905845483036,
|
9687 |
+
"grad_norm": 0.3502468466758728,
|
9688 |
+
"learning_rate": 1.775919117552427e-07,
|
9689 |
+
"loss": 0.808,
|
9690 |
+
"step": 1383
|
9691 |
+
},
|
9692 |
+
{
|
9693 |
+
"epoch": 0.37716310123995095,
|
9694 |
+
"grad_norm": 0.3081386685371399,
|
9695 |
+
"learning_rate": 1.6837313559368572e-07,
|
9696 |
+
"loss": 0.7853,
|
9697 |
+
"step": 1384
|
9698 |
+
},
|
9699 |
+
{
|
9700 |
+
"epoch": 0.3774356179315983,
|
9701 |
+
"grad_norm": 0.4822728633880615,
|
9702 |
+
"learning_rate": 1.5939964846249378e-07,
|
9703 |
+
"loss": 0.8926,
|
9704 |
+
"step": 1385
|
9705 |
+
},
|
9706 |
+
{
|
9707 |
+
"epoch": 0.37770813462324565,
|
9708 |
+
"grad_norm": 0.377630352973938,
|
9709 |
+
"learning_rate": 1.5067149453237284e-07,
|
9710 |
+
"loss": 0.8901,
|
9711 |
+
"step": 1386
|
9712 |
+
},
|
9713 |
+
{
|
9714 |
+
"epoch": 0.37798065131489306,
|
9715 |
+
"grad_norm": 0.4227350652217865,
|
9716 |
+
"learning_rate": 1.42188716766406e-07,
|
9717 |
+
"loss": 0.7943,
|
9718 |
+
"step": 1387
|
9719 |
+
},
|
9720 |
+
{
|
9721 |
+
"epoch": 0.3782531680065404,
|
9722 |
+
"grad_norm": 0.3976781368255615,
|
9723 |
+
"learning_rate": 1.3395135691985361e-07,
|
9724 |
+
"loss": 0.8125,
|
9725 |
+
"step": 1388
|
9726 |
+
},
|
9727 |
+
{
|
9728 |
+
"epoch": 0.37852568469818776,
|
9729 |
+
"grad_norm": 0.419124037027359,
|
9730 |
+
"learning_rate": 1.2595945553992573e-07,
|
9731 |
+
"loss": 0.8715,
|
9732 |
+
"step": 1389
|
9733 |
+
},
|
9734 |
+
{
|
9735 |
+
"epoch": 0.3787982013898351,
|
9736 |
+
"grad_norm": 0.5759310722351074,
|
9737 |
+
"learning_rate": 1.1821305196562104e-07,
|
9738 |
+
"loss": 0.8559,
|
9739 |
+
"step": 1390
|
9740 |
+
},
|
9741 |
+
{
|
9742 |
+
"epoch": 0.3790707180814825,
|
9743 |
+
"grad_norm": 0.481563001871109,
|
9744 |
+
"learning_rate": 1.1071218432749941e-07,
|
9745 |
+
"loss": 0.8058,
|
9746 |
+
"step": 1391
|
9747 |
+
},
|
9748 |
+
{
|
9749 |
+
"epoch": 0.37934323477312987,
|
9750 |
+
"grad_norm": 0.5830849409103394,
|
9751 |
+
"learning_rate": 1.03456889547493e-07,
|
9752 |
+
"loss": 0.8477,
|
9753 |
+
"step": 1392
|
9754 |
+
},
|
9755 |
+
{
|
9756 |
+
"epoch": 0.3796157514647772,
|
9757 |
+
"grad_norm": 0.47717374563217163,
|
9758 |
+
"learning_rate": 9.6447203338762e-08,
|
9759 |
+
"loss": 0.8508,
|
9760 |
+
"step": 1393
|
9761 |
+
},
|
9762 |
+
{
|
9763 |
+
"epoch": 0.37988826815642457,
|
9764 |
+
"grad_norm": 0.6384487152099609,
|
9765 |
+
"learning_rate": 8.968316020547262e-08,
|
9766 |
+
"loss": 0.9627,
|
9767 |
+
"step": 1394
|
9768 |
+
},
|
9769 |
+
{
|
9770 |
+
"epoch": 0.3801607848480719,
|
9771 |
+
"grad_norm": 0.5922986268997192,
|
9772 |
+
"learning_rate": 8.316479344266382e-08,
|
9773 |
+
"loss": 1.0394,
|
9774 |
+
"step": 1395
|
9775 |
+
},
|
9776 |
+
{
|
9777 |
+
"epoch": 0.3804333015397193,
|
9778 |
+
"grad_norm": 0.652921199798584,
|
9779 |
+
"learning_rate": 7.689213513605298e-08,
|
9780 |
+
"loss": 1.0299,
|
9781 |
+
"step": 1396
|
9782 |
+
},
|
9783 |
+
{
|
9784 |
+
"epoch": 0.3807058182313667,
|
9785 |
+
"grad_norm": 0.8793203830718994,
|
9786 |
+
"learning_rate": 7.086521616190279e-08,
|
9787 |
+
"loss": 1.0732,
|
9788 |
+
"step": 1397
|
9789 |
+
},
|
9790 |
+
{
|
9791 |
+
"epoch": 0.38097833492301403,
|
9792 |
+
"grad_norm": 0.6398942470550537,
|
9793 |
+
"learning_rate": 6.508406618686014e-08,
|
9794 |
+
"loss": 0.724,
|
9795 |
+
"step": 1398
|
9796 |
+
},
|
9797 |
+
{
|
9798 |
+
"epoch": 0.3812508516146614,
|
9799 |
+
"grad_norm": 0.9504656195640564,
|
9800 |
+
"learning_rate": 5.954871366779524e-08,
|
9801 |
+
"loss": 0.9419,
|
9802 |
+
"step": 1399
|
9803 |
+
},
|
9804 |
+
{
|
9805 |
+
"epoch": 0.3815233683063088,
|
9806 |
+
"grad_norm": 1.257886290550232,
|
9807 |
+
"learning_rate": 5.4259185851701646e-08,
|
9808 |
+
"loss": 0.998,
|
9809 |
+
"step": 1400
|
9810 |
+
},
|
9811 |
+
{
|
9812 |
+
"epoch": 0.38179588499795614,
|
9813 |
+
"grad_norm": 0.2546479105949402,
|
9814 |
+
"learning_rate": 4.921550877550751e-08,
|
9815 |
+
"loss": 0.5172,
|
9816 |
+
"step": 1401
|
9817 |
+
},
|
9818 |
+
{
|
9819 |
+
"epoch": 0.3820684016896035,
|
9820 |
+
"grad_norm": 0.2128339409828186,
|
9821 |
+
"learning_rate": 4.441770726599792e-08,
|
9822 |
+
"loss": 0.6078,
|
9823 |
+
"step": 1402
|
9824 |
+
},
|
9825 |
+
{
|
9826 |
+
"epoch": 0.38234091838125084,
|
9827 |
+
"grad_norm": 0.19867826998233795,
|
9828 |
+
"learning_rate": 3.986580493965941e-08,
|
9829 |
+
"loss": 0.5305,
|
9830 |
+
"step": 1403
|
9831 |
+
},
|
9832 |
+
{
|
9833 |
+
"epoch": 0.3826134350728982,
|
9834 |
+
"grad_norm": 0.172965869307518,
|
9835 |
+
"learning_rate": 3.555982420257453e-08,
|
9836 |
+
"loss": 0.5478,
|
9837 |
+
"step": 1404
|
9838 |
+
},
|
9839 |
+
{
|
9840 |
+
"epoch": 0.3828859517645456,
|
9841 |
+
"grad_norm": 0.1612936407327652,
|
9842 |
+
"learning_rate": 3.149978625032191e-08,
|
9843 |
+
"loss": 0.5748,
|
9844 |
+
"step": 1405
|
9845 |
+
},
|
9846 |
+
{
|
9847 |
+
"epoch": 0.38315846845619295,
|
9848 |
+
"grad_norm": 0.20216292142868042,
|
9849 |
+
"learning_rate": 2.7685711067848564e-08,
|
9850 |
+
"loss": 0.5429,
|
9851 |
+
"step": 1406
|
9852 |
+
},
|
9853 |
+
{
|
9854 |
+
"epoch": 0.3834309851478403,
|
9855 |
+
"grad_norm": 0.2009732723236084,
|
9856 |
+
"learning_rate": 2.4117617429397776e-08,
|
9857 |
+
"loss": 0.6402,
|
9858 |
+
"step": 1407
|
9859 |
+
},
|
9860 |
+
{
|
9861 |
+
"epoch": 0.38370350183948765,
|
9862 |
+
"grad_norm": 0.1839277148246765,
|
9863 |
+
"learning_rate": 2.0795522898392482e-08,
|
9864 |
+
"loss": 0.5503,
|
9865 |
+
"step": 1408
|
9866 |
+
},
|
9867 |
+
{
|
9868 |
+
"epoch": 0.38397601853113505,
|
9869 |
+
"grad_norm": 0.22925753891468048,
|
9870 |
+
"learning_rate": 1.7719443827368677e-08,
|
9871 |
+
"loss": 0.7,
|
9872 |
+
"step": 1409
|
9873 |
+
},
|
9874 |
+
{
|
9875 |
+
"epoch": 0.3842485352227824,
|
9876 |
+
"grad_norm": 0.2548539936542511,
|
9877 |
+
"learning_rate": 1.4889395357892133e-08,
|
9878 |
+
"loss": 0.8137,
|
9879 |
+
"step": 1410
|
9880 |
+
},
|
9881 |
+
{
|
9882 |
+
"epoch": 0.38452105191442976,
|
9883 |
+
"grad_norm": 0.23133116960525513,
|
9884 |
+
"learning_rate": 1.2305391420458502e-08,
|
9885 |
+
"loss": 0.7484,
|
9886 |
+
"step": 1411
|
9887 |
+
},
|
9888 |
+
{
|
9889 |
+
"epoch": 0.3847935686060771,
|
9890 |
+
"grad_norm": 0.2605571746826172,
|
9891 |
+
"learning_rate": 9.967444734459986e-09,
|
9892 |
+
"loss": 0.8134,
|
9893 |
+
"step": 1412
|
9894 |
+
},
|
9895 |
+
{
|
9896 |
+
"epoch": 0.38506608529772446,
|
9897 |
+
"grad_norm": 0.24061955511569977,
|
9898 |
+
"learning_rate": 7.875566808107637e-09,
|
9899 |
+
"loss": 0.7054,
|
9900 |
+
"step": 1413
|
9901 |
+
},
|
9902 |
+
{
|
9903 |
+
"epoch": 0.38533860198937187,
|
9904 |
+
"grad_norm": 0.27078038454055786,
|
9905 |
+
"learning_rate": 6.029767938364739e-09,
|
9906 |
+
"loss": 0.6806,
|
9907 |
+
"step": 1414
|
9908 |
+
},
|
9909 |
+
{
|
9910 |
+
"epoch": 0.3856111186810192,
|
9911 |
+
"grad_norm": 0.2434467077255249,
|
9912 |
+
"learning_rate": 4.4300572109134966e-09,
|
9913 |
+
"loss": 0.783,
|
9914 |
+
"step": 1415
|
9915 |
+
},
|
9916 |
+
{
|
9917 |
+
"epoch": 0.38588363537266657,
|
9918 |
+
"grad_norm": 0.21693919599056244,
|
9919 |
+
"learning_rate": 3.0764425000995302e-09,
|
9920 |
+
"loss": 0.7023,
|
9921 |
+
"step": 1416
|
9922 |
+
},
|
9923 |
+
{
|
9924 |
+
"epoch": 0.3861561520643139,
|
9925 |
+
"grad_norm": 0.29057759046554565,
|
9926 |
+
"learning_rate": 1.9689304688985664e-09,
|
9927 |
+
"loss": 0.7731,
|
9928 |
+
"step": 1417
|
9929 |
+
},
|
9930 |
+
{
|
9931 |
+
"epoch": 0.3864286687559613,
|
9932 |
+
"grad_norm": 0.2739744782447815,
|
9933 |
+
"learning_rate": 1.1075265688775815e-09,
|
9934 |
+
"loss": 0.7776,
|
9935 |
+
"step": 1418
|
9936 |
+
},
|
9937 |
+
{
|
9938 |
+
"epoch": 0.3867011854476087,
|
9939 |
+
"grad_norm": 0.32430922985076904,
|
9940 |
+
"learning_rate": 4.922350401781461e-10,
|
9941 |
+
"loss": 0.771,
|
9942 |
+
"step": 1419
|
9943 |
+
},
|
9944 |
+
{
|
9945 |
+
"epoch": 0.386973702139256,
|
9946 |
+
"grad_norm": 0.2674165368080139,
|
9947 |
+
"learning_rate": 1.2305891147756933e-10,
|
9948 |
+
"loss": 0.811,
|
9949 |
+
"step": 1420
|
9950 |
+
},
|
9951 |
+
{
|
9952 |
+
"epoch": 0.3872462188309034,
|
9953 |
+
"grad_norm": 0.3200191259384155,
|
9954 |
+
"learning_rate": 0.0,
|
9955 |
+
"loss": 0.8542,
|
9956 |
+
"step": 1421
|
9957 |
}
|
9958 |
],
|
9959 |
"logging_steps": 1,
|
9968 |
"should_evaluate": false,
|
9969 |
"should_log": false,
|
9970 |
"should_save": true,
|
9971 |
+
"should_training_stop": true
|
9972 |
},
|
9973 |
"attributes": {}
|
9974 |
}
|
9975 |
},
|
9976 |
+
"total_flos": 1.6718563698435686e+17,
|
9977 |
"train_batch_size": 4,
|
9978 |
"trial_name": null,
|
9979 |
"trial_params": null
|