Training in progress, step 1458, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:77a35ddae5a49184effccacd48dc1aef49a51c66a406eb600e8cf8dcf817f3f9
 size 27024
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cc2468ccfae1297a3602d5a797ee4aa3aeddeb1178e1d7cb22d2c039b1d48beb
 size 64038
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:da2f3736139f86eb2e423375789bde32fa05e4661b3cd2bee766195f97ade183
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4acbeea1043695671c13f1f50a710455de5a6bb5bf140a0c008c93a72ba23b2e
 size 1064
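The four checkpoint blobs above are tracked with Git LFS, so each diff only touches the three-line pointer stub (version, oid, size); the binary content itself lives in LFS storage, and only the sha256 digest changes between checkpoints. A minimal sketch for reading such a pointer from an unsmudged checkout (the `parse_lfs_pointer` helper is hypothetical, not part of any library):

```python
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer stub into its version/oid/size fields."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    # The oid line is stored as "sha256:<hex digest>".
    algo, _, digest = fields["oid"].partition(":")
    return {
        "version": fields["version"],
        "algo": algo,
        "digest": digest,
        "size": int(fields["size"]),
    }

# Against the scheduler pointer above, this would yield:
# {'version': 'https://git-lfs.github.com/spec/v1', 'algo': 'sha256',
#  'digest': '4acbeea1043695671c13f1f50a710455de5a6bb5bf140a0c008c93a72ba23b2e',
#  'size': 1064}
```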
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.6174695606140815,
   "eval_steps": 365,
-  "global_step":
+  "global_step": 1458,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -7704,6 +7704,2547 @@
   "eval_samples_per_second": 283.315,
   "eval_steps_per_second": 141.8,
   "step": 1095
 }
 ],
 "logging_steps": 1,
@@ -7718,12 +10259,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
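As a consistency check on the header change above: the new state pins global_step 1458 at epoch 0.6174695606140815, which implies about 2361.25 optimizer steps per epoch; the exact dataloader length is not recoverable from this diff, so that ratio is an inference, not a recorded value. A quick sketch of the arithmetic, using only numbers that appear in this diff:

```python
# Values taken from the trainer_state.json header diff above.
epoch, global_step = 0.6174695606140815, 1458

steps_per_epoch = global_step / epoch  # ~2361.25
print(f"{steps_per_epoch:.2f} optimizer steps per epoch")

# The first logged entry added by this hunk (step 1096 at
# epoch 0.4641609317098994) gives the same ratio, so the old
# and new headers agree with the log entries that follow.
assert abs(1096 / 0.4641609317098994 - steps_per_epoch) < 1e-6
```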
7707 |
+
},
|
7708 |
+
{
|
7709 |
+
"epoch": 0.4641609317098994,
|
7710 |
+
"grad_norm": 0.03839697316288948,
|
7711 |
+
"learning_rate": 2.9289321881345254e-05,
|
7712 |
+
"loss": 10.3289,
|
7713 |
+
"step": 1096
|
7714 |
+
},
|
7715 |
+
{
|
7716 |
+
"epoch": 0.46458443620963474,
|
7717 |
+
"grad_norm": 0.044403206557035446,
|
7718 |
+
"learning_rate": 2.913607396018283e-05,
|
7719 |
+
"loss": 10.3282,
|
7720 |
+
"step": 1097
|
7721 |
+
},
|
7722 |
+
{
|
7723 |
+
"epoch": 0.46500794070937,
|
7724 |
+
"grad_norm": 0.04016818851232529,
|
7725 |
+
"learning_rate": 2.8983159609539635e-05,
|
7726 |
+
"loss": 10.3326,
|
7727 |
+
"step": 1098
|
7728 |
+
},
|
7729 |
+
{
|
7730 |
+
"epoch": 0.46543144520910534,
|
7731 |
+
"grad_norm": 0.036192361265420914,
|
7732 |
+
"learning_rate": 2.883057954921379e-05,
|
7733 |
+
"loss": 10.3341,
|
7734 |
+
"step": 1099
|
7735 |
+
},
|
7736 |
+
{
|
7737 |
+
"epoch": 0.46585494970884067,
|
7738 |
+
"grad_norm": 0.03316795825958252,
|
7739 |
+
"learning_rate": 2.8678334497429804e-05,
|
7740 |
+
"loss": 10.3307,
|
7741 |
+
"step": 1100
|
7742 |
+
},
|
7743 |
+
{
|
7744 |
+
"epoch": 0.46627845420857594,
|
7745 |
+
"grad_norm": 0.04433278739452362,
|
7746 |
+
"learning_rate": 2.8526425170835304e-05,
|
7747 |
+
"loss": 10.3269,
|
7748 |
+
"step": 1101
|
7749 |
+
},
|
7750 |
+
{
|
7751 |
+
"epoch": 0.46670195870831127,
|
7752 |
+
"grad_norm": 0.042218469083309174,
|
7753 |
+
"learning_rate": 2.8374852284497446e-05,
|
7754 |
+
"loss": 10.3234,
|
7755 |
+
"step": 1102
|
7756 |
+
},
|
7757 |
+
{
|
7758 |
+
"epoch": 0.4671254632080466,
|
7759 |
+
"grad_norm": 0.035935208201408386,
|
7760 |
+
"learning_rate": 2.82236165518999e-05,
|
7761 |
+
"loss": 10.3236,
|
7762 |
+
"step": 1103
|
7763 |
+
},
|
7764 |
+
{
|
7765 |
+
"epoch": 0.46754896770778187,
|
7766 |
+
"grad_norm": 0.04235940799117088,
|
7767 |
+
"learning_rate": 2.8072718684939104e-05,
|
7768 |
+
"loss": 10.331,
|
7769 |
+
"step": 1104
|
7770 |
+
},
|
7771 |
+
{
|
7772 |
+
"epoch": 0.4679724722075172,
|
7773 |
+
"grad_norm": 0.037976741790771484,
|
7774 |
+
"learning_rate": 2.79221593939212e-05,
|
7775 |
+
"loss": 10.3276,
|
7776 |
+
"step": 1105
|
7777 |
+
},
|
7778 |
+
{
|
7779 |
+
"epoch": 0.4683959767072525,
|
7780 |
+
"grad_norm": 0.03852176293730736,
|
7781 |
+
"learning_rate": 2.7771939387558554e-05,
|
7782 |
+
"loss": 10.3299,
|
7783 |
+
"step": 1106
|
7784 |
+
},
|
7785 |
+
{
|
7786 |
+
"epoch": 0.4688194812069878,
|
7787 |
+
"grad_norm": 0.041406646370887756,
|
7788 |
+
"learning_rate": 2.7622059372966447e-05,
|
7789 |
+
"loss": 10.332,
|
7790 |
+
"step": 1107
|
7791 |
+
},
|
7792 |
+
{
|
7793 |
+
"epoch": 0.4692429857067231,
|
7794 |
+
"grad_norm": 0.04931745305657387,
|
7795 |
+
"learning_rate": 2.7472520055659768e-05,
|
7796 |
+
"loss": 10.3257,
|
7797 |
+
"step": 1108
|
7798 |
+
},
|
7799 |
+
{
|
7800 |
+
"epoch": 0.46966649020645845,
|
7801 |
+
"grad_norm": 0.05635618790984154,
|
7802 |
+
"learning_rate": 2.7323322139549667e-05,
|
7803 |
+
"loss": 10.3321,
|
7804 |
+
"step": 1109
|
7805 |
+
},
|
7806 |
+
{
|
7807 |
+
"epoch": 0.4700899947061937,
|
7808 |
+
"grad_norm": 0.029194293543696404,
|
7809 |
+
"learning_rate": 2.717446632694025e-05,
|
7810 |
+
"loss": 10.3269,
|
7811 |
+
"step": 1110
|
7812 |
+
},
|
7813 |
+
{
|
7814 |
+
"epoch": 0.47051349920592905,
|
7815 |
+
"grad_norm": 0.0442640595138073,
|
7816 |
+
"learning_rate": 2.7025953318525276e-05,
|
7817 |
+
"loss": 10.3257,
|
7818 |
+
"step": 1111
|
7819 |
+
},
|
7820 |
+
{
|
7821 |
+
"epoch": 0.4709370037056644,
|
7822 |
+
"grad_norm": 0.03716416284441948,
|
7823 |
+
"learning_rate": 2.6877783813384894e-05,
|
7824 |
+
"loss": 10.3269,
|
7825 |
+
"step": 1112
|
7826 |
+
},
|
7827 |
+
{
|
7828 |
+
"epoch": 0.4713605082053997,
|
7829 |
+
"grad_norm": 0.039771221578121185,
|
7830 |
+
"learning_rate": 2.67299585089822e-05,
|
7831 |
+
"loss": 10.3334,
|
7832 |
+
"step": 1113
|
7833 |
+
},
|
7834 |
+
{
|
7835 |
+
"epoch": 0.471784012705135,
|
7836 |
+
"grad_norm": 0.04597123712301254,
|
7837 |
+
"learning_rate": 2.6582478101160167e-05,
|
7838 |
+
"loss": 10.3293,
|
7839 |
+
"step": 1114
|
7840 |
+
},
|
7841 |
+
{
|
7842 |
+
"epoch": 0.4722075172048703,
|
7843 |
+
"grad_norm": 0.03897184506058693,
|
7844 |
+
"learning_rate": 2.6435343284138325e-05,
|
7845 |
+
"loss": 10.3333,
|
7846 |
+
"step": 1115
|
7847 |
+
},
|
7848 |
+
{
|
7849 |
+
"epoch": 0.47263102170460564,
|
7850 |
+
"grad_norm": 0.03779776394367218,
|
7851 |
+
"learning_rate": 2.6288554750509286e-05,
|
7852 |
+
"loss": 10.3295,
|
7853 |
+
"step": 1116
|
7854 |
+
},
|
7855 |
+
{
|
7856 |
+
"epoch": 0.4730545262043409,
|
7857 |
+
"grad_norm": 0.03770693764090538,
|
7858 |
+
"learning_rate": 2.614211319123575e-05,
|
7859 |
+
"loss": 10.3289,
|
7860 |
+
"step": 1117
|
7861 |
+
},
|
7862 |
+
{
|
7863 |
+
"epoch": 0.47347803070407624,
|
7864 |
+
"grad_norm": 0.04091110825538635,
|
7865 |
+
"learning_rate": 2.599601929564709e-05,
|
7866 |
+
"loss": 10.3261,
|
7867 |
+
"step": 1118
|
7868 |
+
},
|
7869 |
+
{
|
7870 |
+
"epoch": 0.47390153520381156,
|
7871 |
+
"grad_norm": 0.04154812917113304,
|
7872 |
+
"learning_rate": 2.5850273751436204e-05,
|
7873 |
+
"loss": 10.3242,
|
7874 |
+
"step": 1119
|
7875 |
+
},
|
7876 |
+
{
|
7877 |
+
"epoch": 0.47432503970354684,
|
7878 |
+
"grad_norm": 0.040770817548036575,
|
7879 |
+
"learning_rate": 2.57048772446562e-05,
|
7880 |
+
"loss": 10.3314,
|
7881 |
+
"step": 1120
|
7882 |
+
},
|
7883 |
+
{
|
7884 |
+
"epoch": 0.47474854420328216,
|
7885 |
+
"grad_norm": 0.04216192662715912,
|
7886 |
+
"learning_rate": 2.5559830459717138e-05,
|
7887 |
+
"loss": 10.3294,
|
7888 |
+
"step": 1121
|
7889 |
+
},
|
7890 |
+
{
|
7891 |
+
"epoch": 0.4751720487030175,
|
7892 |
+
"grad_norm": 0.03507891669869423,
|
7893 |
+
"learning_rate": 2.5415134079383006e-05,
|
7894 |
+
"loss": 10.3299,
|
7895 |
+
"step": 1122
|
7896 |
+
},
|
7897 |
+
{
|
7898 |
+
"epoch": 0.47559555320275276,
|
7899 |
+
"grad_norm": 0.04350593313574791,
|
7900 |
+
"learning_rate": 2.5270788784768283e-05,
|
7901 |
+
"loss": 10.3273,
|
7902 |
+
"step": 1123
|
7903 |
+
},
|
7904 |
+
{
|
7905 |
+
"epoch": 0.4760190577024881,
|
7906 |
+
"grad_norm": 0.03649619594216347,
|
7907 |
+
"learning_rate": 2.512679525533479e-05,
|
7908 |
+
"loss": 10.321,
|
7909 |
+
"step": 1124
|
7910 |
+
},
|
7911 |
+
{
|
7912 |
+
"epoch": 0.4764425622022234,
|
7913 |
+
"grad_norm": 0.039560846984386444,
|
7914 |
+
"learning_rate": 2.4983154168888547e-05,
|
7915 |
+
"loss": 10.3373,
|
7916 |
+
"step": 1125
|
7917 |
+
},
|
7918 |
+
{
|
7919 |
+
"epoch": 0.4768660667019587,
|
7920 |
+
"grad_norm": 0.03292880579829216,
|
7921 |
+
"learning_rate": 2.4839866201576646e-05,
|
7922 |
+
"loss": 10.3277,
|
7923 |
+
"step": 1126
|
7924 |
+
},
|
7925 |
+
{
|
7926 |
+
"epoch": 0.477289571201694,
|
7927 |
+
"grad_norm": 0.043432679027318954,
|
7928 |
+
"learning_rate": 2.469693202788381e-05,
|
7929 |
+
"loss": 10.3316,
|
7930 |
+
"step": 1127
|
7931 |
+
},
|
7932 |
+
{
|
7933 |
+
"epoch": 0.47771307570142935,
|
7934 |
+
"grad_norm": 0.0317220538854599,
|
7935 |
+
"learning_rate": 2.4554352320629525e-05,
|
7936 |
+
"loss": 10.3289,
|
7937 |
+
"step": 1128
|
7938 |
+
},
|
7939 |
+
{
|
7940 |
+
"epoch": 0.4781365802011646,
|
7941 |
+
"grad_norm": 0.03324584290385246,
|
7942 |
+
"learning_rate": 2.4412127750964642e-05,
|
7943 |
+
"loss": 10.3319,
|
7944 |
+
"step": 1129
|
7945 |
+
},
|
7946 |
+
{
|
7947 |
+
"epoch": 0.47856008470089995,
|
7948 |
+
"grad_norm": 0.047287166118621826,
|
7949 |
+
"learning_rate": 2.4270258988368376e-05,
|
7950 |
+
"loss": 10.3279,
|
7951 |
+
"step": 1130
|
7952 |
+
},
|
7953 |
+
{
|
7954 |
+
"epoch": 0.4789835892006353,
|
7955 |
+
"grad_norm": 0.04056520760059357,
|
7956 |
+
"learning_rate": 2.412874670064508e-05,
|
7957 |
+
"loss": 10.3288,
|
7958 |
+
"step": 1131
|
7959 |
+
},
|
7960 |
+
{
|
7961 |
+
"epoch": 0.47940709370037055,
|
7962 |
+
"grad_norm": 0.04232284054160118,
|
7963 |
+
"learning_rate": 2.3987591553920997e-05,
|
7964 |
+
"loss": 10.3255,
|
7965 |
+
"step": 1132
|
7966 |
+
},
|
7967 |
+
{
|
7968 |
+
"epoch": 0.4798305982001059,
|
7969 |
+
"grad_norm": 0.04037028178572655,
|
7970 |
+
"learning_rate": 2.384679421264142e-05,
|
7971 |
+
"loss": 10.3265,
|
7972 |
+
"step": 1133
|
7973 |
+
},
|
7974 |
+
{
|
7975 |
+
"epoch": 0.4802541026998412,
|
7976 |
+
"grad_norm": 0.040380604565143585,
|
7977 |
+
"learning_rate": 2.3706355339567286e-05,
|
7978 |
+
"loss": 10.3252,
|
7979 |
+
"step": 1134
|
7980 |
+
},
|
7981 |
+
{
|
7982 |
+
"epoch": 0.4806776071995765,
|
7983 |
+
"grad_norm": 0.05329408869147301,
|
7984 |
+
"learning_rate": 2.3566275595772102e-05,
|
7985 |
+
"loss": 10.3339,
|
7986 |
+
"step": 1135
|
7987 |
+
},
|
7988 |
+
{
|
7989 |
+
"epoch": 0.4811011116993118,
|
7990 |
+
"grad_norm": 0.045708801597356796,
|
7991 |
+
"learning_rate": 2.3426555640638927e-05,
|
7992 |
+
"loss": 10.3291,
|
7993 |
+
"step": 1136
|
7994 |
+
},
|
7995 |
+
{
|
7996 |
+
"epoch": 0.48152461619904713,
|
7997 |
+
"grad_norm": 0.03992190584540367,
|
7998 |
+
"learning_rate": 2.3287196131857326e-05,
|
7999 |
+
"loss": 10.3274,
|
8000 |
+
"step": 1137
|
8001 |
+
},
|
8002 |
+
{
|
8003 |
+
"epoch": 0.4819481206987824,
|
8004 |
+
"grad_norm": 0.03594038262963295,
|
8005 |
+
"learning_rate": 2.3148197725419983e-05,
|
8006 |
+
"loss": 10.3302,
|
8007 |
+
"step": 1138
|
8008 |
+
},
|
8009 |
+
{
|
8010 |
+
"epoch": 0.48237162519851773,
|
8011 |
+
"grad_norm": 0.04509597271680832,
|
8012 |
+
"learning_rate": 2.3009561075619924e-05,
|
8013 |
+
"loss": 10.3247,
|
8014 |
+
"step": 1139
|
8015 |
+
},
|
8016 |
+
{
|
8017 |
+
"epoch": 0.48279512969825306,
|
8018 |
+
"grad_norm": 0.04743657261133194,
|
8019 |
+
"learning_rate": 2.287128683504729e-05,
|
8020 |
+
"loss": 10.3272,
|
8021 |
+
"step": 1140
|
8022 |
+
},
|
8023 |
+
{
|
8024 |
+
"epoch": 0.48321863419798833,
|
8025 |
+
"grad_norm": 0.039697956293821335,
|
8026 |
+
"learning_rate": 2.27333756545863e-05,
|
8027 |
+
"loss": 10.3277,
|
8028 |
+
"step": 1141
|
8029 |
+
},
|
8030 |
+
{
|
8031 |
+
"epoch": 0.48364213869772366,
|
8032 |
+
"grad_norm": 0.03204713389277458,
|
8033 |
+
"learning_rate": 2.2595828183412172e-05,
|
8034 |
+
"loss": 10.3286,
|
8035 |
+
"step": 1142
|
8036 |
+
},
|
8037 |
+
{
|
8038 |
+
"epoch": 0.484065643197459,
|
8039 |
+
"grad_norm": 0.03838271647691727,
|
8040 |
+
"learning_rate": 2.2458645068988017e-05,
|
8041 |
+
"loss": 10.3272,
|
8042 |
+
"step": 1143
|
8043 |
+
},
|
8044 |
+
{
|
8045 |
+
"epoch": 0.48448914769719426,
|
8046 |
+
"grad_norm": 0.03789222985506058,
|
8047 |
+
"learning_rate": 2.2321826957061888e-05,
|
8048 |
+
"loss": 10.3319,
|
8049 |
+
"step": 1144
|
8050 |
+
},
|
8051 |
+
{
|
8052 |
+
"epoch": 0.4849126521969296,
|
8053 |
+
"grad_norm": 0.03325512632727623,
|
8054 |
+
"learning_rate": 2.218537449166379e-05,
|
8055 |
+
"loss": 10.3292,
|
8056 |
+
"step": 1145
|
8057 |
+
},
|
8058 |
+
{
|
8059 |
+
"epoch": 0.4853361566966649,
|
8060 |
+
"grad_norm": 0.038055550307035446,
|
8061 |
+
"learning_rate": 2.2049288315102412e-05,
|
8062 |
+
"loss": 10.3284,
|
8063 |
+
"step": 1146
|
8064 |
+
},
|
8065 |
+
{
|
8066 |
+
"epoch": 0.48575966119640024,
|
8067 |
+
"grad_norm": 0.04726425185799599,
|
8068 |
+
"learning_rate": 2.1913569067962303e-05,
|
8069 |
+
"loss": 10.3313,
|
8070 |
+
"step": 1147
|
8071 |
+
},
|
8072 |
+
{
|
8073 |
+
"epoch": 0.4861831656961355,
|
8074 |
+
"grad_norm": 0.046996794641017914,
|
8075 |
+
"learning_rate": 2.177821738910083e-05,
|
8076 |
+
"loss": 10.3295,
|
8077 |
+
"step": 1148
|
8078 |
+
},
|
8079 |
+
{
|
8080 |
+
"epoch": 0.48660667019587084,
|
8081 |
+
"grad_norm": 0.03838600963354111,
|
8082 |
+
"learning_rate": 2.1643233915645123e-05,
|
8083 |
+
"loss": 10.3312,
|
8084 |
+
"step": 1149
|
8085 |
+
},
|
8086 |
+
{
|
8087 |
+
"epoch": 0.48703017469560617,
|
8088 |
+
"grad_norm": 0.04116002097725868,
|
8089 |
+
"learning_rate": 2.1508619282989084e-05,
|
8090 |
+
"loss": 10.3282,
|
8091 |
+
"step": 1150
|
8092 |
+
},
|
8093 |
+
{
|
8094 |
+
"epoch": 0.48745367919534144,
|
8095 |
+
"grad_norm": 0.03648724406957626,
|
8096 |
+
"learning_rate": 2.1374374124790432e-05,
|
8097 |
+
"loss": 10.3321,
|
8098 |
+
"step": 1151
|
8099 |
+
},
|
8100 |
+
{
|
8101 |
+
"epoch": 0.48787718369507677,
|
8102 |
+
"grad_norm": 0.03551384434103966,
|
8103 |
+
"learning_rate": 2.124049907296768e-05,
|
8104 |
+
"loss": 10.3294,
|
8105 |
+
"step": 1152
|
8106 |
+
},
|
8107 |
+
{
|
8108 |
+
"epoch": 0.4883006881948121,
|
8109 |
+
"grad_norm": 0.04874705150723457,
|
8110 |
+
"learning_rate": 2.1106994757697175e-05,
|
8111 |
+
"loss": 10.3275,
|
8112 |
+
"step": 1153
|
8113 |
+
},
|
8114 |
+
{
|
8115 |
+
"epoch": 0.48872419269454737,
|
8116 |
+
"grad_norm": 0.0415460467338562,
|
8117 |
+
"learning_rate": 2.097386180741019e-05,
|
8118 |
+
"loss": 10.3313,
|
8119 |
+
"step": 1154
|
8120 |
+
},
|
8121 |
+
{
|
8122 |
+
"epoch": 0.4891476971942827,
|
8123 |
+
"grad_norm": 0.04439619183540344,
|
8124 |
+
"learning_rate": 2.084110084878975e-05,
|
8125 |
+
"loss": 10.332,
|
8126 |
+
"step": 1155
|
8127 |
+
},
|
8128 |
+
{
|
8129 |
+
"epoch": 0.489571201694018,
|
8130 |
+
"grad_norm": 0.04033254459500313,
|
8131 |
+
"learning_rate": 2.070871250676808e-05,
|
8132 |
+
"loss": 10.3328,
|
8133 |
+
"step": 1156
|
8134 |
+
},
|
8135 |
+
{
|
8136 |
+
"epoch": 0.4899947061937533,
|
8137 |
+
"grad_norm": 0.038417741656303406,
|
8138 |
+
"learning_rate": 2.0576697404523214e-05,
|
8139 |
+
"loss": 10.3319,
|
8140 |
+
"step": 1157
|
8141 |
+
},
|
8142 |
+
{
|
8143 |
+
"epoch": 0.4904182106934886,
|
8144 |
+
"grad_norm": 0.0399969108402729,
|
8145 |
+
"learning_rate": 2.0445056163476374e-05,
|
8146 |
+
"loss": 10.3299,
|
8147 |
+
"step": 1158
|
8148 |
+
},
|
8149 |
+
{
|
8150 |
+
"epoch": 0.49084171519322395,
|
8151 |
+
"grad_norm": 0.04094449430704117,
|
8152 |
+
"learning_rate": 2.0313789403288942e-05,
|
8153 |
+
"loss": 10.3295,
|
8154 |
+
"step": 1159
|
8155 |
+
},
|
8156 |
+
{
|
8157 |
+
"epoch": 0.4912652196929592,
|
8158 |
+
"grad_norm": 0.037713877856731415,
|
8159 |
+
"learning_rate": 2.0182897741859497e-05,
|
8160 |
+
"loss": 10.3268,
|
8161 |
+
"step": 1160
|
8162 |
+
},
|
8163 |
+
{
|
8164 |
+
"epoch": 0.49168872419269455,
|
8165 |
+
"grad_norm": 0.0324854739010334,
|
8166 |
+
"learning_rate": 2.005238179532102e-05,
|
8167 |
+
"loss": 10.3302,
|
8168 |
+
"step": 1161
|
8169 |
+
},
|
8170 |
+
{
|
8171 |
+
"epoch": 0.4921122286924299,
|
8172 |
+
"grad_norm": 0.038544911891222,
|
8173 |
+
"learning_rate": 1.9922242178037864e-05,
|
8174 |
+
"loss": 10.3284,
|
8175 |
+
"step": 1162
|
8176 |
+
},
|
8177 |
+
{
|
8178 |
+
"epoch": 0.49253573319216515,
|
8179 |
+
"grad_norm": 0.03958024084568024,
|
8180 |
+
"learning_rate": 1.9792479502602967e-05,
|
8181 |
+
"loss": 10.3278,
|
8182 |
+
"step": 1163
|
8183 |
+
},
|
8184 |
+
{
|
8185 |
+
"epoch": 0.4929592376919005,
|
8186 |
+
"grad_norm": 0.041991736739873886,
|
8187 |
+
"learning_rate": 1.9663094379834912e-05,
|
8188 |
+
"loss": 10.3265,
|
8189 |
+
"step": 1164
|
8190 |
+
},
|
8191 |
+
{
|
8192 |
+
"epoch": 0.4933827421916358,
|
8193 |
+
"grad_norm": 0.03454459831118584,
|
8194 |
+
"learning_rate": 1.9534087418775093e-05,
|
8195 |
+
"loss": 10.3287,
|
8196 |
+
"step": 1165
|
8197 |
+
},
|
8198 |
+
{
|
8199 |
+
"epoch": 0.4938062466913711,
|
8200 |
+
"grad_norm": 0.043180350214242935,
|
8201 |
+
"learning_rate": 1.940545922668472e-05,
|
8202 |
+
"loss": 10.3278,
|
8203 |
+
"step": 1166
|
8204 |
+
},
|
8205 |
+
{
|
8206 |
+
"epoch": 0.4942297511911064,
|
8207 |
+
"grad_norm": 0.042323771864175797,
|
8208 |
+
"learning_rate": 1.9277210409042222e-05,
|
8209 |
+
"loss": 10.3287,
|
8210 |
+
"step": 1167
|
8211 |
+
},
|
8212 |
+
{
|
8213 |
+
"epoch": 0.49465325569084173,
|
8214 |
+
"grad_norm": 0.047895364463329315,
|
8215 |
+
"learning_rate": 1.9149341569540158e-05,
|
8216 |
+
"loss": 10.3296,
|
8217 |
+
"step": 1168
|
8218 |
+
},
|
8219 |
+
{
|
8220 |
+
"epoch": 0.495076760190577,
|
8221 |
+
"grad_norm": 0.044591717422008514,
|
8222 |
+
"learning_rate": 1.9021853310082383e-05,
|
8223 |
+
"loss": 10.33,
|
8224 |
+
"step": 1169
|
8225 |
+
},
|
8226 |
+
{
|
8227 |
+
"epoch": 0.49550026469031233,
|
8228 |
+
"grad_norm": 0.04516749829053879,
|
8229 |
+
"learning_rate": 1.88947462307814e-05,
|
8230 |
+
"loss": 10.3273,
|
8231 |
+
"step": 1170
|
8232 |
+
},
|
8233 |
+
{
|
8234 |
+
"epoch": 0.49592376919004766,
|
8235 |
+
"grad_norm": 0.05060568079352379,
|
8236 |
+
"learning_rate": 1.8768020929955377e-05,
|
8237 |
+
"loss": 10.3297,
|
8238 |
+
"step": 1171
|
8239 |
+
},
|
8240 |
+
{
|
8241 |
+
"epoch": 0.49634727368978293,
|
8242 |
+
"grad_norm": 0.03802963346242905,
|
8243 |
+
"learning_rate": 1.8641678004125363e-05,
|
8244 |
+
"loss": 10.3296,
|
8245 |
+
"step": 1172
|
8246 |
+
},
|
8247 |
+
{
|
8248 |
+
"epoch": 0.49677077818951826,
|
8249 |
+
"grad_norm": 0.03468478471040726,
|
8250 |
+
"learning_rate": 1.8515718048012544e-05,
|
8251 |
+
"loss": 10.3282,
|
8252 |
+
"step": 1173
|
8253 |
+
},
|
8254 |
+
{
|
8255 |
+
"epoch": 0.4971942826892536,
|
8256 |
+
"grad_norm": 0.06058938428759575,
|
8257 |
+
"learning_rate": 1.8390141654535265e-05,
|
8258 |
+
"loss": 10.328,
|
8259 |
+
"step": 1174
|
8260 |
+
},
|
8261 |
+
{
|
8262 |
+
"epoch": 0.49761778718898886,
|
8263 |
+
"grad_norm": 0.03682539984583855,
|
8264 |
+
"learning_rate": 1.826494941480654e-05,
|
8265 |
+
"loss": 10.3313,
|
8266 |
+
"step": 1175
|
8267 |
+
},
|
8268 |
+
{
|
8269 |
+
"epoch": 0.4980412916887242,
|
8270 |
+
"grad_norm": 0.0412411093711853,
|
8271 |
+
"learning_rate": 1.8140141918131004e-05,
|
8272 |
+
"loss": 10.3318,
|
8273 |
+
"step": 1176
|
8274 |
+
},
|
8275 |
+
{
|
8276 |
+
"epoch": 0.4984647961884595,
|
8277 |
+
"grad_norm": 0.041588738560676575,
|
8278 |
+
"learning_rate": 1.801571975200218e-05,
|
8279 |
+
"loss": 10.3325,
|
8280 |
+
"step": 1177
|
8281 |
+
},
|
8282 |
+
{
|
8283 |
+
"epoch": 0.4988883006881948,
|
8284 |
+
"grad_norm": 0.02872418612241745,
|
8285 |
+
"learning_rate": 1.789168350209983e-05,
|
8286 |
+
"loss": 10.3294,
|
8287 |
+
"step": 1178
|
8288 |
+
},
|
8289 |
+
{
|
8290 |
+
"epoch": 0.4993118051879301,
|
8291 |
+
"grad_norm": 0.039319563657045364,
|
8292 |
+
"learning_rate": 1.7768033752287183e-05,
|
8293 |
+
"loss": 10.3259,
|
8294 |
+
"step": 1179
|
8295 |
+
},
|
8296 |
+
{
|
8297 |
+
"epoch": 0.49973530968766544,
|
8298 |
+
"grad_norm": 0.041396401822566986,
|
8299 |
+
"learning_rate": 1.7644771084608015e-05,
|
8300 |
+
"loss": 10.3308,
|
8301 |
+
"step": 1180
|
8302 |
+
},
|
8303 |
+
{
|
8304 |
+
"epoch": 0.5001588141874007,
|
8305 |
+
"grad_norm": 0.03175836428999901,
|
8306 |
+
"learning_rate": 1.7521896079284095e-05,
|
8307 |
+
"loss": 10.3269,
|
8308 |
+
"step": 1181
|
8309 |
+
},
|
8310 |
+
{
|
8311 |
+
"epoch": 0.5005823186871361,
|
8312 |
+
"grad_norm": 0.047037266194820404,
|
8313 |
+
"learning_rate": 1.739940931471239e-05,
|
8314 |
+
"loss": 10.3296,
|
8315 |
+
"step": 1182
|
8316 |
+
},
|
8317 |
+
{
|
8318 |
+
"epoch": 0.5010058231868714,
|
8319 |
+
"grad_norm": 0.04150497913360596,
|
8320 |
+
"learning_rate": 1.7277311367462335e-05,
|
8321 |
+
"loss": 10.3282,
|
8322 |
+
"step": 1183
|
8323 |
+
},
|
8324 |
+
{
|
8325 |
+
"epoch": 0.5014293276866066,
|
8326 |
+
"grad_norm": 0.04559915512800217,
|
8327 |
+
"learning_rate": 1.7155602812273152e-05,
|
8328 |
+
"loss": 10.3349,
|
8329 |
+
"step": 1184
|
8330 |
+
},
|
8331 |
+
{
|
8332 |
+
"epoch": 0.501852832186342,
|
8333 |
+
"grad_norm": 0.04043659567832947,
|
8334 |
+
"learning_rate": 1.7034284222051012e-05,
|
8335 |
+
"loss": 10.3272,
|
8336 |
+
"step": 1185
|
8337 |
+
},
|
8338 |
+
{
|
8339 |
+
"epoch": 0.5022763366860773,
|
8340 |
+
"grad_norm": 0.04098321869969368,
|
8341 |
+
"learning_rate": 1.6913356167866578e-05,
|
8342 |
+
"loss": 10.3316,
|
8343 |
+
"step": 1186
|
8344 |
+
},
|
8345 |
+
{
|
8346 |
+
"epoch": 0.5026998411858126,
|
8347 |
+
"grad_norm": 0.035557378083467484,
|
8348 |
+
"learning_rate": 1.679281921895215e-05,
|
8349 |
+
"loss": 10.3297,
|
8350 |
+
"step": 1187
|
8351 |
+
},
|
8352 |
+
{
|
8353 |
+
"epoch": 0.503123345685548,
|
8354 |
+
"grad_norm": 0.05428796261548996,
|
8355 |
+
"learning_rate": 1.6672673942698925e-05,
|
8356 |
+
"loss": 10.3232,
|
8357 |
+
"step": 1188
|
8358 |
+
},
|
8359 |
+
{
|
8360 |
+
"epoch": 0.5035468501852832,
|
8361 |
+
"grad_norm": 0.04074474796652794,
|
8362 |
+
"learning_rate": 1.655292090465449e-05,
|
8363 |
+
"loss": 10.3246,
|
8364 |
+
"step": 1189
|
8365 |
+
},
|
8366 |
+
{
|
8367 |
+
"epoch": 0.5039703546850185,
|
8368 |
+
"grad_norm": 0.03878313675522804,
|
8369 |
+
"learning_rate": 1.6433560668520176e-05,
|
8370 |
+
"loss": 10.3246,
|
8371 |
+
"step": 1190
|
8372 |
+
},
|
8373 |
+
{
|
8374 |
+
"epoch": 0.5043938591847539,
|
8375 |
+
"grad_norm": 0.03631461411714554,
|
8376 |
+
"learning_rate": 1.6314593796148136e-05,
|
8377 |
+
"loss": 10.331,
|
8378 |
+
"step": 1191
|
8379 |
+
},
|
8380 |
+
{
|
8381 |
+
"epoch": 0.5048173636844892,
|
8382 |
+
"grad_norm": 0.036254312843084335,
|
8383 |
+
"learning_rate": 1.6196020847539006e-05,
|
8384 |
+
"loss": 10.3227,
|
8385 |
+
"step": 1192
|
8386 |
+
},
|
8387 |
+
{
|
8388 |
+
"epoch": 0.5052408681842244,
|
8389 |
+
"grad_norm": 0.0433821827173233,
|
8390 |
+
"learning_rate": 1.6077842380839114e-05,
|
8391 |
+
"loss": 10.3281,
|
8392 |
+
"step": 1193
|
8393 |
+
},
|
8394 |
+
{
|
8395 |
+
"epoch": 0.5056643726839598,
|
8396 |
+
"grad_norm": 0.038387492299079895,
|
8397 |
+
"learning_rate": 1.5960058952337887e-05,
|
8398 |
+
"loss": 10.3268,
|
8399 |
+
"step": 1194
|
8400 |
+
},
|
8401 |
+
{
|
8402 |
+
"epoch": 0.5060878771836951,
|
8403 |
+
"grad_norm": 0.03496537357568741,
|
8404 |
+
"learning_rate": 1.5842671116465235e-05,
|
8405 |
+
"loss": 10.3243,
|
8406 |
+
"step": 1195
|
8407 |
+
},
|
8408 |
+
{
|
8409 |
+
"epoch": 0.5065113816834304,
|
8410 |
+
"grad_norm": 0.03603898733854294,
|
8411 |
+
"learning_rate": 1.5725679425788853e-05,
|
8412 |
+
"loss": 10.3326,
|
8413 |
+
"step": 1196
|
8414 |
+
},
|
8415 |
+
{
|
8416 |
+
"epoch": 0.5069348861831657,
|
8417 |
+
"grad_norm": 0.039027996361255646,
|
8418 |
+
"learning_rate": 1.5609084431011867e-05,
|
8419 |
+
"loss": 10.326,
|
8420 |
+
"step": 1197
|
8421 |
+
},
|
8422 |
+
{
|
8423 |
+
"epoch": 0.507358390682901,
|
8424 |
+
"grad_norm": 0.038697127252817154,
|
8425 |
+
"learning_rate": 1.5492886680969963e-05,
|
8426 |
+
"loss": 10.3332,
|
8427 |
+
"step": 1198
|
8428 |
+
},
|
8429 |
+
{
|
8430 |
+
"epoch": 0.5077818951826363,
|
8431 |
+
"grad_norm": 0.038827307522296906,
|
8432 |
+
"learning_rate": 1.5377086722628897e-05,
|
8433 |
+
"loss": 10.3302,
|
8434 |
+
"step": 1199
|
8435 |
+
},
|
8436 |
+
{
|
8437 |
+
"epoch": 0.5082053996823717,
|
8438 |
+
"grad_norm": 0.038713064044713974,
|
8439 |
+
"learning_rate": 1.526168510108199e-05,
|
8440 |
+
"loss": 10.3286,
|
8441 |
+
"step": 1200
|
8442 |
+
},
|
8443 |
+
{
|
8444 |
+
"epoch": 0.5086289041821069,
|
8445 |
+
"grad_norm": 0.04002411663532257,
|
8446 |
+
"learning_rate": 1.514668235954757e-05,
|
8447 |
+
"loss": 10.3318,
|
8448 |
+
"step": 1201
|
8449 |
+
},
|
8450 |
+
{
|
8451 |
+
"epoch": 0.5090524086818422,
|
8452 |
+
"grad_norm": 0.03701453655958176,
|
8453 |
+
"learning_rate": 1.5032079039366209e-05,
|
8454 |
+
"loss": 10.3271,
|
8455 |
+
"step": 1202
|
8456 |
+
},
|
8457 |
+
{
|
8458 |
+
"epoch": 0.5094759131815776,
|
8459 |
+
"grad_norm": 0.04189831018447876,
|
8460 |
+
"learning_rate": 1.4917875679998462e-05,
|
8461 |
+
"loss": 10.3308,
|
8462 |
+
"step": 1203
|
8463 |
+
},
|
8464 |
+
{
|
8465 |
+
"epoch": 0.5098994176813129,
|
8466 |
+
"grad_norm": 0.03437069430947304,
|
8467 |
+
"learning_rate": 1.4804072819022108e-05,
|
8468 |
+
"loss": 10.3282,
|
8469 |
+
"step": 1204
|
8470 |
+
},
|
8471 |
+
{
|
8472 |
+
"epoch": 0.5103229221810481,
|
8473 |
+
"grad_norm": 0.038920581340789795,
|
8474 |
+
"learning_rate": 1.469067099212973e-05,
|
8475 |
+
"loss": 10.3315,
|
8476 |
+
"step": 1205
|
8477 |
+
},
|
8478 |
+
{
|
8479 |
+
"epoch": 0.5107464266807835,
|
8480 |
+
"grad_norm": 0.05227049067616463,
|
8481 |
+
"learning_rate": 1.4577670733126203e-05,
|
8482 |
+
"loss": 10.3242,
|
8483 |
+
"step": 1206
|
8484 |
+
},
|
8485 |
+
{
|
8486 |
+
"epoch": 0.5111699311805188,
|
8487 |
+
"grad_norm": 0.0365944467484951,
|
8488 |
+
"learning_rate": 1.4465072573926042e-05,
|
8489 |
+
"loss": 10.3315,
|
8490 |
+
"step": 1207
|
8491 |
+
},
|
8492 |
+
{
|
8493 |
+
"epoch": 0.5115934356802541,
|
8494 |
+
"grad_norm": 0.03627532348036766,
|
8495 |
+
"learning_rate": 1.435287704455105e-05,
|
8496 |
+
"loss": 10.329,
|
8497 |
+
"step": 1208
|
8498 |
+
},
|
8499 |
+
{
|
8500 |
+
"epoch": 0.5120169401799894,
|
8501 |
+
"grad_norm": 0.04100056737661362,
|
8502 |
+
"learning_rate": 1.4241084673127869e-05,
|
8503 |
+
"loss": 10.3277,
|
8504 |
+
"step": 1209
|
8505 |
+
},
|
8506 |
+
{
|
8507 |
+
"epoch": 0.5124404446797247,
|
8508 |
+
"grad_norm": 0.03915872797369957,
|
8509 |
+
"learning_rate": 1.4129695985885228e-05,
|
8510 |
+
"loss": 10.3293,
|
8511 |
+
"step": 1210
|
8512 |
+
},
|
8513 |
+
{
|
8514 |
+
"epoch": 0.51286394917946,
|
8515 |
+
"grad_norm": 0.04233933612704277,
|
8516 |
+
"learning_rate": 1.4018711507151738e-05,
|
8517 |
+
"loss": 10.3347,
|
8518 |
+
"step": 1211
|
8519 |
+
},
|
8520 |
+
{
|
8521 |
+
"epoch": 0.5132874536791954,
|
8522 |
+
"grad_norm": 0.0427318811416626,
|
8523 |
+
"learning_rate": 1.3908131759353304e-05,
|
8524 |
+
"loss": 10.325,
|
8525 |
+
"step": 1212
|
8526 |
+
},
|
8527 |
+
{
|
8528 |
+
"epoch": 0.5137109581789306,
|
8529 |
+
"grad_norm": 0.04049833491444588,
|
8530 |
+
"learning_rate": 1.3797957263010664e-05,
|
8531 |
+
"loss": 10.3318,
|
8532 |
+
"step": 1213
|
8533 |
+
},
|
8534 |
+
{
|
8535 |
+
"epoch": 0.5141344626786659,
|
8536 |
+
"grad_norm": 0.034728001803159714,
|
8537 |
+
"learning_rate": 1.3688188536736968e-05,
|
8538 |
+
"loss": 10.3276,
|
8539 |
+
"step": 1214
|
8540 |
+
},
|
8541 |
+
{
|
8542 |
+
"epoch": 0.5145579671784013,
|
8543 |
+
"grad_norm": 0.051667772233486176,
|
8544 |
+
"learning_rate": 1.3578826097235343e-05,
|
8545 |
+
"loss": 10.3275,
|
8546 |
+
"step": 1215
|
8547 |
+
},
|
8548 |
+
{
|
8549 |
+
"epoch": 0.5149814716781366,
|
8550 |
+
"grad_norm": 0.035801179707050323,
|
8551 |
+
"learning_rate": 1.3469870459296408e-05,
|
8552 |
+
"loss": 10.3311,
|
8553 |
+
"step": 1216
|
8554 |
+
},
|
8555 |
+
{
|
8556 |
+
"epoch": 0.5154049761778718,
|
8557 |
+
"grad_norm": 0.04537419602274895,
|
8558 |
+
"learning_rate": 1.3361322135795918e-05,
|
8559 |
+
"loss": 10.334,
|
8560 |
+
"step": 1217
|
8561 |
+
},
|
8562 |
+
{
|
8563 |
+
"epoch": 0.5158284806776072,
|
8564 |
+
"grad_norm": 0.03713669627904892,
|
8565 |
+
"learning_rate": 1.3253181637692324e-05,
|
8566 |
+
"loss": 10.3255,
|
8567 |
+
"step": 1218
|
8568 |
+
},
|
8569 |
+
{
|
8570 |
+
"epoch": 0.5162519851773425,
|
8571 |
+
"grad_norm": 0.05887385830283165,
|
8572 |
+
"learning_rate": 1.3145449474024285e-05,
|
8573 |
+
"loss": 10.3252,
|
8574 |
+
"step": 1219
|
8575 |
+
},
|
8576 |
+
{
|
8577 |
+
"epoch": 0.5166754896770778,
|
8578 |
+
"grad_norm": 0.04287916049361229,
|
8579 |
+
"learning_rate": 1.3038126151908492e-05,
|
8580 |
+
"loss": 10.3359,
|
8581 |
+
"step": 1220
|
8582 |
+
},
|
8583 |
+
{
|
8584 |
+
"epoch": 0.5170989941768132,
|
8585 |
+
"grad_norm": 0.038009535521268845,
|
8586 |
+
"learning_rate": 1.2931212176536977e-05,
|
8587 |
+
"loss": 10.3313,
|
8588 |
+
"step": 1221
|
8589 |
+
},
|
8590 |
+
{
|
8591 |
+
"epoch": 0.5175224986765484,
|
8592 |
+
"grad_norm": 0.02924671769142151,
|
8593 |
+
"learning_rate": 1.2824708051175016e-05,
|
8594 |
+
"loss": 10.3259,
|
8595 |
+
"step": 1222
|
8596 |
+
},
|
8597 |
+
{
|
8598 |
+
"epoch": 0.5179460031762837,
|
8599 |
+
"grad_norm": 0.05172424390912056,
|
8600 |
+
"learning_rate": 1.2718614277158591e-05,
|
8601 |
+
"loss": 10.329,
|
8602 |
+
"step": 1223
|
8603 |
+
},
|
8604 |
+
{
|
8605 |
+
"epoch": 0.5183695076760191,
|
8606 |
+
"grad_norm": 0.044165514409542084,
|
8607 |
+
"learning_rate": 1.2612931353892077e-05,
|
8608 |
+
"loss": 10.3276,
|
8609 |
+
"step": 1224
|
8610 |
+
},
|
8611 |
+
{
|
8612 |
+
"epoch": 0.5187930121757544,
|
8613 |
+
"grad_norm": 0.036774665117263794,
|
8614 |
+
"learning_rate": 1.2507659778845904e-05,
|
8615 |
+
"loss": 10.3311,
|
8616 |
+
"step": 1225
|
8617 |
+
},
|
8618 |
+
{
|
8619 |
+
"epoch": 0.5192165166754896,
|
8620 |
+
"grad_norm": 0.04366540163755417,
|
8621 |
+
"learning_rate": 1.2402800047554208e-05,
|
8622 |
+
"loss": 10.3231,
|
8623 |
+
"step": 1226
|
8624 |
+
},
|
8625 |
+
{
|
8626 |
+
"epoch": 0.519640021175225,
|
8627 |
+
"grad_norm": 0.03668185696005821,
|
8628 |
+
"learning_rate": 1.2298352653612477e-05,
|
8629 |
+
"loss": 10.3292,
|
8630 |
+
"step": 1227
|
8631 |
+
},
|
8632 |
+
{
|
8633 |
+
"epoch": 0.5200635256749603,
|
8634 |
+
"grad_norm": 0.05343744531273842,
|
8635 |
+
"learning_rate": 1.2194318088675283e-05,
|
8636 |
+
"loss": 10.3244,
|
8637 |
+
"step": 1228
|
8638 |
+
},
|
8639 |
+
{
|
8640 |
+
"epoch": 0.5204870301746956,
|
8641 |
+
"grad_norm": 0.03161430358886719,
|
8642 |
+
"learning_rate": 1.2090696842453897e-05,
|
8643 |
+
"loss": 10.3317,
|
8644 |
+
"step": 1229
|
8645 |
+
},
|
8646 |
+
{
|
8647 |
+
"epoch": 0.5209105346744309,
|
8648 |
+
"grad_norm": 0.037658821791410446,
|
8649 |
+
"learning_rate": 1.1987489402713981e-05,
|
8650 |
+
"loss": 10.3299,
|
8651 |
+
"step": 1230
|
8652 |
+
},
|
8653 |
+
{
|
8654 |
+
"epoch": 0.5213340391741662,
|
8655 |
+
"grad_norm": 0.05165412649512291,
|
8656 |
+
"learning_rate": 1.188469625527342e-05,
|
8657 |
+
"loss": 10.3303,
|
8658 |
+
"step": 1231
|
8659 |
+
},
|
8660 |
+
{
|
8661 |
+
"epoch": 0.5217575436739015,
|
8662 |
+
"grad_norm": 0.04333394393324852,
|
8663 |
+
"learning_rate": 1.1782317883999915e-05,
|
8664 |
+
"loss": 10.3308,
|
8665 |
+
"step": 1232
|
8666 |
+
},
|
8667 |
+
{
|
8668 |
+
"epoch": 0.5221810481736369,
|
8669 |
+
"grad_norm": 0.04078406095504761,
|
8670 |
+
"learning_rate": 1.1680354770808654e-05,
|
8671 |
+
"loss": 10.3291,
|
8672 |
+
"step": 1233
|
8673 |
+
},
|
8674 |
+
{
|
8675 |
+
"epoch": 0.5226045526733721,
|
8676 |
+
"grad_norm": 0.0339806005358696,
|
8677 |
+
"learning_rate": 1.1578807395660207e-05,
|
8678 |
+
"loss": 10.33,
|
8679 |
+
"step": 1234
|
8680 |
+
},
|
8681 |
+
{
|
8682 |
+
"epoch": 0.5230280571731075,
|
8683 |
+
"grad_norm": 0.05339012295007706,
|
8684 |
+
"learning_rate": 1.1477676236558165e-05,
|
8685 |
+
"loss": 10.3297,
|
8686 |
+
"step": 1235
|
8687 |
+
},
|
8688 |
+
{
|
8689 |
+
"epoch": 0.5234515616728428,
|
8690 |
+
"grad_norm": 0.03436762094497681,
|
8691 |
+
"learning_rate": 1.137696176954689e-05,
|
8692 |
+
"loss": 10.3304,
|
8693 |
+
"step": 1236
|
8694 |
+
},
|
8695 |
+
{
|
8696 |
+
"epoch": 0.5238750661725781,
|
8697 |
+
"grad_norm": 0.03947856277227402,
|
8698 |
+
"learning_rate": 1.1276664468709342e-05,
|
8699 |
+
"loss": 10.3264,
|
8700 |
+
"step": 1237
|
8701 |
+
},
|
8702 |
+
{
|
8703 |
+
"epoch": 0.5242985706723134,
|
8704 |
+
"grad_norm": 0.04072525352239609,
|
8705 |
+
"learning_rate": 1.1176784806164676e-05,
|
8706 |
+
"loss": 10.3315,
|
8707 |
+
"step": 1238
|
8708 |
+
},
|
8709 |
+
{
|
8710 |
+
"epoch": 0.5247220751720487,
|
8711 |
+
"grad_norm": 0.03758727014064789,
|
8712 |
+
"learning_rate": 1.1077323252066296e-05,
|
8713 |
+
"loss": 10.3283,
|
8714 |
+
"step": 1239
|
8715 |
+
},
|
8716 |
+
{
|
8717 |
+
"epoch": 0.525145579671784,
|
8718 |
+
"grad_norm": 0.04229472205042839,
|
8719 |
+
"learning_rate": 1.0978280274599418e-05,
|
8720 |
+
"loss": 10.3296,
|
8721 |
+
"step": 1240
|
8722 |
+
},
|
8723 |
+
{
|
8724 |
+
"epoch": 0.5255690841715194,
|
8725 |
+
"grad_norm": 0.03863406553864479,
|
8726 |
+
"learning_rate": 1.0879656339978882e-05,
|
8727 |
+
"loss": 10.3289,
|
8728 |
+
"step": 1241
|
8729 |
+
},
|
8730 |
+
{
|
8731 |
+
"epoch": 0.5259925886712546,
|
8732 |
+
"grad_norm": 0.03158913180232048,
|
8733 |
+
"learning_rate": 1.078145191244706e-05,
|
8734 |
+
"loss": 10.327,
|
8735 |
+
"step": 1242
|
8736 |
+
},
|
8737 |
+
{
|
8738 |
+
"epoch": 0.5264160931709899,
|
8739 |
+
"grad_norm": 0.04095543548464775,
|
8740 |
+
"learning_rate": 1.0683667454271695e-05,
|
8741 |
+
"loss": 10.3297,
|
8742 |
+
"step": 1243
|
8743 |
+
},
|
8744 |
+
{
|
8745 |
+
"epoch": 0.5268395976707253,
|
8746 |
+
"grad_norm": 0.031121550127863884,
|
8747 |
+
"learning_rate": 1.0586303425743493e-05,
|
8748 |
+
"loss": 10.3293,
|
8749 |
+
"step": 1244
|
8750 |
+
},
|
8751 |
+
{
|
8752 |
+
"epoch": 0.5272631021704606,
|
8753 |
+
"grad_norm": 0.044046152383089066,
|
8754 |
+
"learning_rate": 1.0489360285174232e-05,
|
8755 |
+
"loss": 10.3295,
|
8756 |
+
"step": 1245
|
8757 |
+
},
|
8758 |
+
{
|
8759 |
+
"epoch": 0.5276866066701958,
|
8760 |
+
"grad_norm": 0.03840816766023636,
|
8761 |
+
"learning_rate": 1.0392838488894463e-05,
|
8762 |
+
"loss": 10.3265,
|
8763 |
+
"step": 1246
|
8764 |
+
},
|
8765 |
+
{
|
8766 |
+
"epoch": 0.5281101111699312,
|
8767 |
+
"grad_norm": 0.034203726798295975,
|
8768 |
+
"learning_rate": 1.0296738491251357e-05,
|
8769 |
+
"loss": 10.3288,
|
8770 |
+
"step": 1247
|
8771 |
+
},
|
8772 |
+
{
|
8773 |
+
"epoch": 0.5285336156696665,
|
8774 |
+
"grad_norm": 0.04024311155080795,
|
8775 |
+
"learning_rate": 1.0201060744606639e-05,
|
8776 |
+
"loss": 10.3293,
|
8777 |
+
"step": 1248
|
8778 |
+
},
|
8779 |
+
{
|
8780 |
+
"epoch": 0.5289571201694018,
|
8781 |
+
"grad_norm": 0.0303033534437418,
|
8782 |
+
"learning_rate": 1.0105805699334336e-05,
|
8783 |
+
"loss": 10.3288,
|
8784 |
+
"step": 1249
|
8785 |
+
},
|
8786 |
+
{
|
8787 |
+
"epoch": 0.5293806246691372,
|
8788 |
+
"grad_norm": 0.03764592111110687,
|
8789 |
+
"learning_rate": 1.0010973803818857e-05,
|
8790 |
+
"loss": 10.3255,
|
8791 |
+
"step": 1250
|
8792 |
+
},
|
8793 |
+
{
|
8794 |
+
"epoch": 0.5298041291688724,
|
8795 |
+
"grad_norm": 0.03739494830369949,
|
8796 |
+
"learning_rate": 9.91656550445268e-06,
|
8797 |
+
"loss": 10.3252,
|
8798 |
+
"step": 1251
|
8799 |
+
},
|
8800 |
+
{
|
8801 |
+
"epoch": 0.5302276336686077,
|
8802 |
+
"grad_norm": 0.04495794326066971,
|
8803 |
+
"learning_rate": 9.822581245634321e-06,
|
8804 |
+
"loss": 10.3267,
|
8805 |
+
"step": 1252
|
8806 |
+
},
|
8807 |
+
{
|
8808 |
+
"epoch": 0.5306511381683431,
|
8809 |
+
"grad_norm": 0.0339081697165966,
|
8810 |
+
"learning_rate": 9.729021469766275e-06,
|
8811 |
+
"loss": 10.3296,
|
8812 |
+
"step": 1253
|
8813 |
+
},
|
8814 |
+
{
|
8815 |
+
"epoch": 0.5310746426680784,
|
8816 |
+
"grad_norm": 0.041814595460891724,
|
8817 |
+
"learning_rate": 9.635886617252975e-06,
|
8818 |
+
"loss": 10.3258,
|
8819 |
+
"step": 1254
|
8820 |
+
},
|
8821 |
+
{
|
8822 |
+
"epoch": 0.5314981471678136,
|
8823 |
+
"grad_norm": 0.041015561670064926,
|
8824 |
+
"learning_rate": 9.543177126498537e-06,
|
8825 |
+
"loss": 10.331,
|
8826 |
+
"step": 1255
|
8827 |
+
},
|
8828 |
+
{
|
8829 |
+
"epoch": 0.531921651667549,
|
8830 |
+
"grad_norm": 0.05070764943957329,
|
8831 |
+
"learning_rate": 9.450893433904894e-06,
|
8832 |
+
"loss": 10.3267,
|
8833 |
+
"step": 1256
|
8834 |
+
},
|
8835 |
+
{
|
8836 |
+
"epoch": 0.5323451561672843,
|
8837 |
+
"grad_norm": 0.03864545747637749,
|
8838 |
+
"learning_rate": 9.359035973869645e-06,
|
8839 |
+
"loss": 10.3288,
|
8840 |
+
"step": 1257
|
8841 |
+
},
|
8842 |
+
{
|
8843 |
+
"epoch": 0.5327686606670196,
|
8844 |
+
"grad_norm": 0.03652477264404297,
|
8845 |
+
"learning_rate": 9.267605178784033e-06,
|
8846 |
+
"loss": 10.3281,
|
8847 |
+
"step": 1258
|
8848 |
+
},
|
8849 |
+
{
|
8850 |
+
"epoch": 0.5331921651667549,
|
8851 |
+
"grad_norm": 0.04767770692706108,
|
8852 |
+
"learning_rate": 9.17660147903091e-06,
|
8853 |
+
"loss": 10.329,
|
8854 |
+
"step": 1259
|
8855 |
+
},
|
8856 |
+
{
|
8857 |
+
"epoch": 0.5336156696664902,
|
8858 |
+
"grad_norm": 0.0491943396627903,
|
8859 |
+
"learning_rate": 9.086025302982648e-06,
|
8860 |
+
"loss": 10.3218,
|
8861 |
+
"step": 1260
|
8862 |
+
},
|
8863 |
+
{
|
8864 |
+
"epoch": 0.5340391741662255,
|
8865 |
+
"grad_norm": 0.04111519455909729,
|
8866 |
+
"learning_rate": 8.995877076999271e-06,
|
8867 |
+
"loss": 10.3271,
|
8868 |
+
"step": 1261
|
8869 |
+
},
|
8870 |
+
{
|
8871 |
+
"epoch": 0.5344626786659609,
|
8872 |
+
"grad_norm": 0.0426565445959568,
|
8873 |
+
"learning_rate": 8.906157225426315e-06,
|
8874 |
+
"loss": 10.3284,
|
8875 |
+
"step": 1262
|
8876 |
+
},
|
8877 |
+
{
|
8878 |
+
"epoch": 0.5348861831656961,
|
8879 |
+
"grad_norm": 0.05047602206468582,
|
8880 |
+
"learning_rate": 8.816866170592841e-06,
|
8881 |
+
"loss": 10.327,
|
8882 |
+
"step": 1263
|
8883 |
+
},
|
8884 |
+
{
|
8885 |
+
"epoch": 0.5353096876654314,
|
8886 |
+
"grad_norm": 0.0326421819627285,
|
8887 |
+
"learning_rate": 8.728004332809514e-06,
|
8888 |
+
"loss": 10.3227,
|
8889 |
+
"step": 1264
|
8890 |
+
},
|
8891 |
+
{
|
8892 |
+
"epoch": 0.5357331921651668,
|
8893 |
+
"grad_norm": 0.03768382966518402,
|
8894 |
+
"learning_rate": 8.639572130366602e-06,
|
8895 |
+
"loss": 10.3327,
|
8896 |
+
"step": 1265
|
8897 |
+
},
|
8898 |
+
{
|
8899 |
+
"epoch": 0.5361566966649021,
|
8900 |
+
"grad_norm": 0.04435701295733452,
|
8901 |
+
"learning_rate": 8.55156997953197e-06,
|
8902 |
+
"loss": 10.3269,
|
8903 |
+
"step": 1266
|
8904 |
+
},
|
8905 |
+
{
|
8906 |
+
"epoch": 0.5365802011646373,
|
8907 |
+
"grad_norm": 0.04310356825590134,
|
8908 |
+
"learning_rate": 8.463998294549146e-06,
|
8909 |
+
"loss": 10.3247,
|
8910 |
+
"step": 1267
|
8911 |
+
},
|
8912 |
+
{
|
8913 |
+
"epoch": 0.5370037056643727,
|
8914 |
+
"grad_norm": 0.04026800021529198,
|
8915 |
+
"learning_rate": 8.37685748763538e-06,
|
8916 |
+
"loss": 10.3314,
|
8917 |
+
"step": 1268
|
8918 |
+
},
|
8919 |
+
{
|
8920 |
+
"epoch": 0.537427210164108,
|
8921 |
+
"grad_norm": 0.0372571237385273,
|
8922 |
+
"learning_rate": 8.290147968979688e-06,
|
8923 |
+
"loss": 10.33,
|
8924 |
+
"step": 1269
|
8925 |
+
},
|
8926 |
+
{
|
8927 |
+
"epoch": 0.5378507146638433,
|
8928 |
+
"grad_norm": 0.055188797414302826,
|
8929 |
+
"learning_rate": 8.203870146740932e-06,
|
8930 |
+
"loss": 10.3266,
|
8931 |
+
"step": 1270
|
8932 |
+
},
|
8933 |
+
{
|
8934 |
+
"epoch": 0.5382742191635786,
|
8935 |
+
"grad_norm": 0.04392211511731148,
|
8936 |
+
"learning_rate": 8.118024427045912e-06,
|
8937 |
+
"loss": 10.3292,
|
8938 |
+
"step": 1271
|
8939 |
+
},
|
8940 |
+
{
|
8941 |
+
"epoch": 0.5386977236633139,
|
8942 |
+
"grad_norm": 0.05256279557943344,
|
8943 |
+
"learning_rate": 8.032611213987351e-06,
|
8944 |
+
"loss": 10.3306,
|
8945 |
+
"step": 1272
|
8946 |
+
},
|
8947 |
+
{
|
8948 |
+
"epoch": 0.5391212281630492,
|
8949 |
+
"grad_norm": 0.05281972140073776,
|
8950 |
+
"learning_rate": 7.947630909622216e-06,
|
8951 |
+
"loss": 10.3287,
|
8952 |
+
"step": 1273
|
8953 |
+
},
|
8954 |
+
{
|
8955 |
+
"epoch": 0.5395447326627846,
|
8956 |
+
"grad_norm": 0.03980804234743118,
|
8957 |
+
"learning_rate": 7.86308391396956e-06,
|
8958 |
+
"loss": 10.3271,
|
8959 |
+
"step": 1274
|
8960 |
+
},
|
8961 |
+
{
|
8962 |
+
"epoch": 0.5399682371625198,
|
8963 |
+
"grad_norm": 0.04108376055955887,
|
8964 |
+
"learning_rate": 7.77897062500883e-06,
|
8965 |
+
"loss": 10.3262,
|
8966 |
+
"step": 1275
|
8967 |
+
},
|
8968 |
+
{
|
8969 |
+
"epoch": 0.5403917416622551,
|
8970 |
+
"grad_norm": 0.041052039712667465,
|
8971 |
+
"learning_rate": 7.695291438677932e-06,
|
8972 |
+
"loss": 10.332,
|
8973 |
+
"step": 1276
|
8974 |
+
},
|
8975 |
+
{
|
8976 |
+
"epoch": 0.5408152461619905,
|
8977 |
+
"grad_norm": 0.0405026450753212,
|
8978 |
+
"learning_rate": 7.612046748871327e-06,
|
8979 |
+
"loss": 10.3303,
|
8980 |
+
"step": 1277
|
8981 |
+
},
|
8982 |
+
{
|
8983 |
+
"epoch": 0.5412387506617258,
|
8984 |
+
"grad_norm": 0.042605962604284286,
|
8985 |
+
"learning_rate": 7.529236947438256e-06,
|
8986 |
+
"loss": 10.33,
|
8987 |
+
"step": 1278
|
8988 |
+
},
|
8989 |
+
{
|
8990 |
+
"epoch": 0.541662255161461,
|
8991 |
+
"grad_norm": 0.04445753991603851,
|
8992 |
+
"learning_rate": 7.446862424180834e-06,
|
8993 |
+
"loss": 10.3292,
|
8994 |
+
"step": 1279
|
8995 |
+
},
|
8996 |
+
{
|
8997 |
+
"epoch": 0.5420857596611964,
|
8998 |
+
"grad_norm": 0.034195564687252045,
|
8999 |
+
"learning_rate": 7.3649235668522445e-06,
|
9000 |
+
"loss": 10.3322,
|
9001 |
+
"step": 1280
|
9002 |
+
},
|
9003 |
+
{
|
9004 |
+
"epoch": 0.5425092641609317,
|
9005 |
+
"grad_norm": 0.11420922726392746,
|
9006 |
+
"learning_rate": 7.28342076115488e-06,
|
9007 |
+
"loss": 10.3301,
|
9008 |
+
"step": 1281
|
9009 |
+
},
|
9010 |
+
{
|
9011 |
+
"epoch": 0.542932768660667,
|
9012 |
+
"grad_norm": 0.04385896399617195,
|
9013 |
+
"learning_rate": 7.202354390738608e-06,
|
9014 |
+
"loss": 10.3251,
|
9015 |
+
"step": 1282
|
9016 |
+
},
|
9017 |
+
{
|
9018 |
+
"epoch": 0.5433562731604024,
|
9019 |
+
"grad_norm": 0.04069971293210983,
|
9020 |
+
"learning_rate": 7.121724837198806e-06,
|
9021 |
+
"loss": 10.3309,
|
9022 |
+
"step": 1283
|
9023 |
+
},
|
9024 |
+
{
|
9025 |
+
"epoch": 0.5437797776601376,
|
9026 |
+
"grad_norm": 0.03832760453224182,
|
9027 |
+
"learning_rate": 7.04153248007482e-06,
|
9028 |
+
"loss": 10.3264,
|
9029 |
+
"step": 1284
|
9030 |
+
},
|
9031 |
+
{
|
9032 |
+
"epoch": 0.5442032821598729,
|
9033 |
+
"grad_norm": 0.038421642035245895,
|
9034 |
+
"learning_rate": 6.961777696847871e-06,
|
9035 |
+
"loss": 10.3276,
|
9036 |
+
"step": 1285
|
9037 |
+
},
|
9038 |
+
{
|
9039 |
+
"epoch": 0.5446267866596083,
|
9040 |
+
"grad_norm": 0.040812235325574875,
|
9041 |
+
"learning_rate": 6.882460862939522e-06,
|
9042 |
+
"loss": 10.3236,
|
9043 |
+
"step": 1286
|
9044 |
+
},
|
9045 |
+
{
|
9046 |
+
"epoch": 0.5450502911593436,
|
9047 |
+
"grad_norm": 0.03375053033232689,
|
9048 |
+
"learning_rate": 6.803582351709814e-06,
|
9049 |
+
"loss": 10.3268,
|
9050 |
+
"step": 1287
|
9051 |
+
},
|
9052 |
+
{
|
9053 |
+
"epoch": 0.5454737956590788,
|
9054 |
+
"grad_norm": 0.042095083743333817,
|
9055 |
+
"learning_rate": 6.725142534455487e-06,
|
9056 |
+
"loss": 10.3293,
|
9057 |
+
"step": 1288
|
9058 |
+
},
|
9059 |
+
{
|
9060 |
+
"epoch": 0.5458973001588142,
|
9061 |
+
"grad_norm": 0.048514217138290405,
|
9062 |
+
"learning_rate": 6.647141780408295e-06,
|
9063 |
+
"loss": 10.3336,
|
9064 |
+
"step": 1289
|
9065 |
+
},
|
9066 |
+
{
|
9067 |
+
"epoch": 0.5463208046585495,
|
9068 |
+
"grad_norm": 0.053984880447387695,
|
9069 |
+
"learning_rate": 6.5695804567332044e-06,
|
9070 |
+
"loss": 10.3268,
|
9071 |
+
"step": 1290
|
9072 |
+
},
|
9073 |
+
{
|
9074 |
+
"epoch": 0.5467443091582848,
|
9075 |
+
"grad_norm": 0.03971254080533981,
|
9076 |
+
"learning_rate": 6.492458928526723e-06,
|
9077 |
+
"loss": 10.3296,
|
9078 |
+
"step": 1291
|
9079 |
+
},
|
9080 |
+
{
|
9081 |
+
"epoch": 0.5471678136580201,
|
9082 |
+
"grad_norm": 0.03751399740576744,
|
9083 |
+
"learning_rate": 6.415777558815139e-06,
|
9084 |
+
"loss": 10.3297,
|
9085 |
+
"step": 1292
|
9086 |
+
},
|
9087 |
+
{
|
9088 |
+
"epoch": 0.5475913181577554,
|
9089 |
+
"grad_norm": 0.04779750481247902,
|
9090 |
+
"learning_rate": 6.339536708552829e-06,
|
9091 |
+
"loss": 10.3329,
|
9092 |
+
"step": 1293
|
9093 |
+
},
|
9094 |
+
{
|
9095 |
+
"epoch": 0.5480148226574907,
|
9096 |
+
"grad_norm": 0.054564010351896286,
|
9097 |
+
"learning_rate": 6.263736736620551e-06,
|
9098 |
+
"loss": 10.3244,
|
9099 |
+
"step": 1294
|
9100 |
+
},
|
9101 |
+
{
|
9102 |
+
"epoch": 0.5484383271572261,
|
9103 |
+
"grad_norm": 0.03428339958190918,
|
9104 |
+
"learning_rate": 6.188377999823758e-06,
|
9105 |
+
"loss": 10.3289,
|
9106 |
+
"step": 1295
|
9107 |
+
},
|
9108 |
+
{
|
9109 |
+
"epoch": 0.5488618316569613,
|
9110 |
+
"grad_norm": 0.04192102700471878,
|
9111 |
+
"learning_rate": 6.1134608528909735e-06,
|
9112 |
+
"loss": 10.3327,
|
9113 |
+
"step": 1296
|
9114 |
+
},
|
9115 |
+
{
|
9116 |
+
"epoch": 0.5492853361566966,
|
9117 |
+
"grad_norm": 0.033049482852220535,
|
9118 |
+
"learning_rate": 6.038985648471995e-06,
|
9119 |
+
"loss": 10.3264,
|
9120 |
+
"step": 1297
|
9121 |
+
},
|
9122 |
+
{
|
9123 |
+
"epoch": 0.549708840656432,
|
9124 |
+
"grad_norm": 0.05712836980819702,
|
9125 |
+
"learning_rate": 5.964952737136353e-06,
|
9126 |
+
"loss": 10.3263,
|
9127 |
+
"step": 1298
|
9128 |
+
},
|
9129 |
+
{
|
9130 |
+
"epoch": 0.5501323451561673,
|
9131 |
+
"grad_norm": 0.03678618744015694,
|
9132 |
+
"learning_rate": 5.891362467371597e-06,
|
9133 |
+
"loss": 10.3288,
|
9134 |
+
"step": 1299
|
9135 |
+
},
|
9136 |
+
{
|
9137 |
+
"epoch": 0.5505558496559025,
|
9138 |
+
"grad_norm": 0.038252294063568115,
|
9139 |
+
"learning_rate": 5.818215185581699e-06,
|
9140 |
+
"loss": 10.3251,
|
9141 |
+
"step": 1300
|
9142 |
+
},
|
9143 |
+
{
|
9144 |
+
"epoch": 0.5509793541556379,
|
9145 |
+
"grad_norm": 0.04190875589847565,
|
9146 |
+
"learning_rate": 5.7455112360854105e-06,
|
9147 |
+
"loss": 10.3248,
|
9148 |
+
"step": 1301
|
9149 |
+
},
|
9150 |
+
{
|
9151 |
+
"epoch": 0.5514028586553732,
|
9152 |
+
"grad_norm": 0.04964831843972206,
|
9153 |
+
"learning_rate": 5.673250961114529e-06,
|
9154 |
+
"loss": 10.3288,
|
9155 |
+
"step": 1302
|
9156 |
+
},
|
9157 |
+
{
|
9158 |
+
"epoch": 0.5518263631551085,
|
9159 |
+
"grad_norm": 0.037448253482580185,
|
9160 |
+
"learning_rate": 5.601434700812535e-06,
|
9161 |
+
"loss": 10.3258,
|
9162 |
+
"step": 1303
|
9163 |
+
},
|
9164 |
+
{
|
9165 |
+
"epoch": 0.5522498676548439,
|
9166 |
+
"grad_norm": 0.04193969443440437,
|
9167 |
+
"learning_rate": 5.530062793232771e-06,
|
9168 |
+
"loss": 10.3319,
|
9169 |
+
"step": 1304
|
9170 |
+
},
|
9171 |
+
{
|
9172 |
+
"epoch": 0.5526733721545791,
|
9173 |
+
"grad_norm": 0.04617246612906456,
|
9174 |
+
"learning_rate": 5.459135574336882e-06,
|
9175 |
+
"loss": 10.3282,
|
9176 |
+
"step": 1305
|
9177 |
+
},
|
9178 |
+
{
|
9179 |
+
"epoch": 0.5530968766543145,
|
9180 |
+
"grad_norm": 0.05252283066511154,
|
9181 |
+
"learning_rate": 5.388653377993324e-06,
|
9182 |
+
"loss": 10.3306,
|
9183 |
+
"step": 1306
|
9184 |
+
},
|
9185 |
+
{
|
9186 |
+
"epoch": 0.5535203811540498,
|
9187 |
+
"grad_norm": 0.0444052629172802,
|
9188 |
+
"learning_rate": 5.318616535975795e-06,
|
9189 |
+
"loss": 10.3282,
|
9190 |
+
"step": 1307
|
9191 |
+
},
|
9192 |
+
{
|
9193 |
+
"epoch": 0.553943885653785,
|
9194 |
+
"grad_norm": 0.034591346979141235,
|
9195 |
+
"learning_rate": 5.249025377961514e-06,
|
9196 |
+
"loss": 10.3281,
|
9197 |
+
"step": 1308
|
9198 |
+
},
|
9199 |
+
{
|
9200 |
+
"epoch": 0.5543673901535204,
|
9201 |
+
"grad_norm": 0.034414127469062805,
|
9202 |
+
"learning_rate": 5.179880231529843e-06,
|
9203 |
+
"loss": 10.3254,
|
9204 |
+
"step": 1309
|
9205 |
+
},
|
9206 |
+
{
|
9207 |
+
"epoch": 0.5547908946532557,
|
9208 |
+
"grad_norm": 0.04177490249276161,
|
9209 |
+
"learning_rate": 5.111181422160671e-06,
|
9210 |
+
"loss": 10.325,
|
9211 |
+
"step": 1310
|
9212 |
+
},
|
9213 |
+
{
|
9214 |
+
"epoch": 0.555214399152991,
|
9215 |
+
"grad_norm": 0.03989149257540703,
|
9216 |
+
"learning_rate": 5.042929273232877e-06,
|
9217 |
+
"loss": 10.3292,
|
9218 |
+
"step": 1311
|
9219 |
+
},
|
9220 |
+
{
|
9221 |
+
"epoch": 0.5556379036527264,
|
9222 |
+
"grad_norm": 0.045721426606178284,
|
9223 |
+
"learning_rate": 4.975124106022844e-06,
|
9224 |
+
"loss": 10.3232,
|
9225 |
+
"step": 1312
|
9226 |
+
},
|
9227 |
+
{
|
9228 |
+
"epoch": 0.5560614081524616,
|
9229 |
+
"grad_norm": 0.04311094433069229,
|
9230 |
+
"learning_rate": 4.907766239702871e-06,
|
9231 |
+
"loss": 10.3285,
|
9232 |
+
"step": 1313
|
9233 |
+
},
|
9234 |
+
{
|
9235 |
+
"epoch": 0.5564849126521969,
|
9236 |
+
"grad_norm": 0.03535007685422897,
|
9237 |
+
"learning_rate": 4.840855991339799e-06,
|
9238 |
+
"loss": 10.3269,
|
9239 |
+
"step": 1314
|
9240 |
+
},
|
9241 |
+
{
|
9242 |
+
"epoch": 0.5569084171519323,
|
9243 |
+
"grad_norm": 0.048799190670251846,
|
9244 |
+
"learning_rate": 4.774393675893407e-06,
|
9245 |
+
"loss": 10.3289,
|
9246 |
+
"step": 1315
|
9247 |
+
},
|
9248 |
+
{
|
9249 |
+
"epoch": 0.5573319216516676,
|
9250 |
+
"grad_norm": 0.0423654206097126,
|
9251 |
+
"learning_rate": 4.70837960621493e-06,
|
9252 |
+
"loss": 10.3266,
|
9253 |
+
"step": 1316
|
9254 |
+
},
|
9255 |
+
{
|
9256 |
+
"epoch": 0.5577554261514028,
|
9257 |
+
"grad_norm": 0.04994356259703636,
|
9258 |
+
"learning_rate": 4.642814093045655e-06,
|
9259 |
+
"loss": 10.3278,
|
9260 |
+
"step": 1317
|
9261 |
+
},
|
9262 |
+
{
|
9263 |
+
"epoch": 0.5581789306511382,
|
9264 |
+
"grad_norm": 0.037734173238277435,
|
9265 |
+
"learning_rate": 4.577697445015472e-06,
|
9266 |
+
"loss": 10.3268,
|
9267 |
+
"step": 1318
|
9268 |
+
},
|
9269 |
+
{
|
9270 |
+
"epoch": 0.5586024351508735,
|
9271 |
+
"grad_norm": 0.040003806352615356,
|
9272 |
+
"learning_rate": 4.51302996864128e-06,
|
9273 |
+
"loss": 10.3294,
|
9274 |
+
"step": 1319
|
9275 |
+
},
|
9276 |
+
{
|
9277 |
+
"epoch": 0.5590259396506088,
|
9278 |
+
"grad_norm": 0.03233201056718826,
|
9279 |
+
"learning_rate": 4.448811968325683e-06,
|
9280 |
+
"loss": 10.3272,
|
9281 |
+
"step": 1320
|
9282 |
+
},
|
9283 |
+
{
|
9284 |
+
"epoch": 0.5594494441503441,
|
9285 |
+
"grad_norm": 0.04166138172149658,
|
9286 |
+
"learning_rate": 4.385043746355544e-06,
|
9287 |
+
"loss": 10.3253,
|
9288 |
+
"step": 1321
|
9289 |
+
},
|
9290 |
+
{
|
9291 |
+
"epoch": 0.5598729486500794,
|
9292 |
+
"grad_norm": 0.035626303404569626,
|
9293 |
+
"learning_rate": 4.321725602900473e-06,
|
9294 |
+
"loss": 10.3329,
|
9295 |
+
"step": 1322
|
9296 |
+
},
|
9297 |
+
{
|
9298 |
+
"epoch": 0.5602964531498147,
|
9299 |
+
"grad_norm": 0.04178909212350845,
|
9300 |
+
"learning_rate": 4.2588578360115204e-06,
|
9301 |
+
"loss": 10.331,
|
9302 |
+
"step": 1323
|
9303 |
+
},
|
9304 |
+
{
|
9305 |
+
"epoch": 0.5607199576495501,
|
9306 |
+
"grad_norm": 0.03815459832549095,
|
9307 |
+
"learning_rate": 4.196440741619678e-06,
|
9308 |
+
"loss": 10.3257,
|
9309 |
+
"step": 1324
|
9310 |
+
},
|
9311 |
+
{
|
9312 |
+
"epoch": 0.5611434621492853,
|
9313 |
+
"grad_norm": 0.03888922557234764,
|
9314 |
+
"learning_rate": 4.134474613534556e-06,
|
9315 |
+
"loss": 10.3255,
|
9316 |
+
"step": 1325
|
9317 |
+
},
|
9318 |
+
{
|
9319 |
+
"epoch": 0.5615669666490206,
|
9320 |
+
"grad_norm": 0.05365046486258507,
|
9321 |
+
"learning_rate": 4.072959743443017e-06,
|
9322 |
+
"loss": 10.3265,
|
9323 |
+
"step": 1326
|
9324 |
+
},
|
9325 |
+
{
|
9326 |
+
"epoch": 0.561990471148756,
|
9327 |
+
"grad_norm": 0.0403805673122406,
|
9328 |
+
"learning_rate": 4.0118964209076795e-06,
|
9329 |
+
"loss": 10.327,
|
9330 |
+
"step": 1327
|
9331 |
+
},
|
9332 |
+
{
|
9333 |
+
"epoch": 0.5624139756484913,
|
9334 |
+
"grad_norm": 0.03630734607577324,
|
9335 |
+
"learning_rate": 3.9512849333657065e-06,
|
9336 |
+
"loss": 10.3267,
|
9337 |
+
"step": 1328
|
9338 |
+
},
|
9339 |
+
{
|
9340 |
+
"epoch": 0.5628374801482265,
|
9341 |
+
"grad_norm": 0.04850611835718155,
|
9342 |
+
"learning_rate": 3.891125566127352e-06,
|
9343 |
+
"loss": 10.333,
|
9344 |
+
"step": 1329
|
9345 |
+
},
|
9346 |
+
{
|
9347 |
+
"epoch": 0.5632609846479619,
|
9348 |
+
"grad_norm": 0.038192879408597946,
|
9349 |
+
"learning_rate": 3.83141860237467e-06,
|
9350 |
+
"loss": 10.3272,
|
9351 |
+
"step": 1330
|
9352 |
+
},
|
9353 |
+
{
|
9354 |
+
"epoch": 0.5636844891476972,
|
9355 |
+
"grad_norm": 0.034314315766096115,
|
9356 |
+
"learning_rate": 3.7721643231601434e-06,
|
9357 |
+
"loss": 10.3248,
|
9358 |
+
"step": 1331
|
9359 |
+
},
|
9360 |
+
{
|
9361 |
+
"epoch": 0.5641079936474325,
|
9362 |
+
"grad_norm": 0.03253219276666641,
|
9363 |
+
"learning_rate": 3.7133630074053794e-06,
|
9364 |
+
"loss": 10.3251,
|
9365 |
+
"step": 1332
|
9366 |
+
},
|
9367 |
+
{
|
9368 |
+
"epoch": 0.5645314981471679,
|
9369 |
+
"grad_norm": 0.0458807609975338,
|
9370 |
+
"learning_rate": 3.655014931899814e-06,
|
9371 |
+
"loss": 10.3291,
|
9372 |
+
"step": 1333
|
9373 |
+
},
|
9374 |
+
{
|
9375 |
+
"epoch": 0.5649550026469031,
|
9376 |
+
"grad_norm": 0.04000389948487282,
|
9377 |
+
"learning_rate": 3.5971203712993894e-06,
|
9378 |
+
"loss": 10.327,
|
9379 |
+
"step": 1334
|
9380 |
+
},
|
9381 |
+
{
|
9382 |
+
"epoch": 0.5653785071466384,
|
9383 |
+
"grad_norm": 0.03413333371281624,
|
9384 |
+
"learning_rate": 3.5396795981252563e-06,
|
9385 |
+
"loss": 10.3243,
|
9386 |
+
"step": 1335
|
9387 |
+
},
|
9388 |
+
{
|
9389 |
+
"epoch": 0.5658020116463738,
|
9390 |
+
"grad_norm": 0.0463162399828434,
|
9391 |
+
"learning_rate": 3.4826928827624617e-06,
|
9392 |
+
"loss": 10.3265,
|
9393 |
+
"step": 1336
|
9394 |
+
},
|
9395 |
+
{
|
9396 |
+
"epoch": 0.566225516146109,
|
9397 |
+
"grad_norm": 0.03909880667924881,
|
9398 |
+
"learning_rate": 3.4261604934587967e-06,
|
9399 |
+
"loss": 10.3309,
|
9400 |
+
"step": 1337
|
9401 |
+
},
|
9402 |
+
{
|
9403 |
+
"epoch": 0.5666490206458443,
|
9404 |
+
"grad_norm": 0.04727085679769516,
|
9405 |
+
"learning_rate": 3.3700826963233735e-06,
|
9406 |
+
"loss": 10.3312,
|
9407 |
+
"step": 1338
|
9408 |
+
},
|
9409 |
+
{
|
9410 |
+
"epoch": 0.5670725251455797,
|
9411 |
+
"grad_norm": 0.03862567991018295,
|
9412 |
+
"learning_rate": 3.314459755325483e-06,
|
9413 |
+
"loss": 10.3294,
|
9414 |
+
"step": 1339
|
9415 |
+
},
|
9416 |
+
{
|
9417 |
+
"epoch": 0.567496029645315,
|
9418 |
+
"grad_norm": 0.05173173174262047,
|
9419 |
+
"learning_rate": 3.259291932293318e-06,
|
9420 |
+
"loss": 10.3256,
|
9421 |
+
"step": 1340
|
9422 |
+
},
|
9423 |
+
{
|
9424 |
+
"epoch": 0.5679195341450503,
|
9425 |
+
"grad_norm": 0.04816534370183945,
|
9426 |
+
"learning_rate": 3.204579486912729e-06,
|
9427 |
+
"loss": 10.3299,
|
9428 |
+
"step": 1341
|
9429 |
+
},
|
9430 |
+
{
|
9431 |
+
"epoch": 0.5683430386447856,
|
9432 |
+
"grad_norm": 0.03575342521071434,
|
9433 |
+
"learning_rate": 3.1503226767260252e-06,
|
9434 |
+
"loss": 10.3301,
|
9435 |
+
"step": 1342
|
9436 |
+
},
|
9437 |
+
{
|
9438 |
+
"epoch": 0.5687665431445209,
|
9439 |
+
"grad_norm": 0.04038777947425842,
|
9440 |
+
"learning_rate": 3.0965217571307437e-06,
|
9441 |
+
"loss": 10.3307,
|
9442 |
+
"step": 1343
|
9443 |
+
},
|
9444 |
+
{
|
9445 |
+
"epoch": 0.5691900476442562,
|
9446 |
+
"grad_norm": 0.03827790170907974,
|
9447 |
+
"learning_rate": 3.0431769813784596e-06,
|
9448 |
+
"loss": 10.3282,
|
9449 |
+
"step": 1344
|
9450 |
+
},
|
9451 |
+
{
|
9452 |
+
"epoch": 0.5696135521439916,
|
9453 |
+
"grad_norm": 0.04328547790646553,
|
9454 |
+
"learning_rate": 2.9902886005735765e-06,
|
9455 |
+
"loss": 10.3281,
|
9456 |
+
"step": 1345
|
9457 |
+
},
|
9458 |
+
{
|
9459 |
+
"epoch": 0.5700370566437268,
|
9460 |
+
"grad_norm": 0.034784335643053055,
|
9461 |
+
"learning_rate": 2.9378568636721835e-06,
|
9462 |
+
"loss": 10.3285,
|
9463 |
+
"step": 1346
|
9464 |
+
},
|
9465 |
+
{
|
9466 |
+
"epoch": 0.5704605611434621,
|
9467 |
+
"grad_norm": 0.03946627303957939,
|
9468 |
+
"learning_rate": 2.885882017480812e-06,
|
9469 |
+
"loss": 10.3257,
|
9470 |
+
"step": 1347
|
9471 |
+
},
|
9472 |
+
{
|
9473 |
+
"epoch": 0.5708840656431975,
|
9474 |
+
"grad_norm": 0.03900349140167236,
|
9475 |
+
"learning_rate": 2.8343643066553793e-06,
|
9476 |
+
"loss": 10.3267,
|
9477 |
+
"step": 1348
|
9478 |
+
},
|
9479 |
+
{
|
9480 |
+
"epoch": 0.5713075701429328,
|
9481 |
+
"grad_norm": 0.03843264281749725,
|
9482 |
+
"learning_rate": 2.7833039736999245e-06,
|
9483 |
+
"loss": 10.3279,
|
9484 |
+
"step": 1349
|
9485 |
+
},
|
9486 |
+
{
|
9487 |
+
"epoch": 0.571731074642668,
|
9488 |
+
"grad_norm": 0.043401993811130524,
|
9489 |
+
"learning_rate": 2.732701258965531e-06,
|
9490 |
+
"loss": 10.3255,
|
9491 |
+
"step": 1350
|
9492 |
+
},
|
9493 |
+
{
|
9494 |
+
"epoch": 0.5721545791424034,
|
9495 |
+
"grad_norm": 0.03729039058089256,
|
9496 |
+
"learning_rate": 2.682556400649194e-06,
|
9497 |
+
"loss": 10.3301,
|
9498 |
+
"step": 1351
|
9499 |
+
},
|
9500 |
+
{
|
9501 |
+
"epoch": 0.5725780836421387,
|
9502 |
+
"grad_norm": 0.04055764526128769,
|
9503 |
+
"learning_rate": 2.6328696347926784e-06,
|
9504 |
+
"loss": 10.3284,
|
9505 |
+
"step": 1352
|
9506 |
+
},
|
9507 |
+
{
|
9508 |
+
"epoch": 0.573001588141874,
|
9509 |
+
"grad_norm": 0.03590220585465431,
|
9510 |
+
"learning_rate": 2.5836411952814277e-06,
|
9511 |
+
"loss": 10.33,
|
9512 |
+
"step": 1353
|
9513 |
+
},
|
9514 |
+
{
|
9515 |
+
"epoch": 0.5734250926416093,
|
9516 |
+
"grad_norm": 0.04289846867322922,
|
9517 |
+
"learning_rate": 2.5348713138434564e-06,
|
9518 |
+
"loss": 10.3268,
|
9519 |
+
"step": 1354
|
9520 |
+
},
|
9521 |
+
{
|
9522 |
+
"epoch": 0.5738485971413446,
|
9523 |
+
"grad_norm": 0.04050644859671593,
|
9524 |
+
"learning_rate": 2.4865602200482065e-06,
|
9525 |
+
"loss": 10.3255,
|
9526 |
+
"step": 1355
|
9527 |
+
},
|
9528 |
+
{
|
9529 |
+
"epoch": 0.5742721016410799,
|
9530 |
+
"grad_norm": 0.03876882046461105,
|
9531 |
+
"learning_rate": 2.4387081413055903e-06,
|
9532 |
+
"loss": 10.3263,
|
9533 |
+
"step": 1356
|
9534 |
+
},
|
9535 |
+
{
|
9536 |
+
"epoch": 0.5746956061408153,
|
9537 |
+
"grad_norm": 0.04008372128009796,
|
9538 |
+
"learning_rate": 2.391315302864805e-06,
|
9539 |
+
"loss": 10.3246,
|
9540 |
+
"step": 1357
|
9541 |
+
},
|
9542 |
+
{
|
9543 |
+
"epoch": 0.5751191106405505,
|
9544 |
+
"grad_norm": 0.03661615401506424,
|
9545 |
+
"learning_rate": 2.3443819278132996e-06,
|
9546 |
+
"loss": 10.3296,
|
9547 |
+
"step": 1358
|
9548 |
+
},
|
9549 |
+
{
|
9550 |
+
"epoch": 0.5755426151402858,
|
9551 |
+
"grad_norm": 0.044347479939460754,
|
9552 |
+
"learning_rate": 2.297908237075763e-06,
|
9553 |
+
"loss": 10.3268,
|
9554 |
+
"step": 1359
|
9555 |
+
},
|
9556 |
+
{
|
9557 |
+
"epoch": 0.5759661196400212,
|
9558 |
+
"grad_norm": 0.04110342264175415,
|
9559 |
+
"learning_rate": 2.251894449413061e-06,
|
9560 |
+
"loss": 10.3275,
|
9561 |
+
"step": 1360
|
9562 |
+
},
|
9563 |
+
{
|
9564 |
+
"epoch": 0.5763896241397565,
|
9565 |
+
"grad_norm": 0.043472178280353546,
|
9566 |
+
"learning_rate": 2.20634078142119e-06,
|
9567 |
+
"loss": 10.3329,
|
9568 |
+
"step": 1361
|
9569 |
+
},
|
9570 |
+
{
|
9571 |
+
"epoch": 0.5768131286394917,
|
9572 |
+
"grad_norm": 0.04407213255763054,
|
9573 |
+
"learning_rate": 2.161247447530268e-06,
|
9574 |
+
"loss": 10.328,
|
9575 |
+
"step": 1362
|
9576 |
+
},
|
9577 |
+
{
|
9578 |
+
"epoch": 0.5772366331392271,
|
9579 |
+
"grad_norm": 0.04149101674556732,
|
9580 |
+
"learning_rate": 2.1166146600035576e-06,
|
9581 |
+
"loss": 10.3267,
|
9582 |
+
"step": 1363
|
9583 |
+
},
|
9584 |
+
{
|
9585 |
+
"epoch": 0.5776601376389624,
|
9586 |
+
"grad_norm": 0.049089882522821426,
|
9587 |
+
"learning_rate": 2.0724426289363995e-06,
|
9588 |
+
"loss": 10.3284,
|
9589 |
+
"step": 1364
|
9590 |
+
},
|
9591 |
+
{
|
9592 |
+
"epoch": 0.5780836421386977,
|
9593 |
+
"grad_norm": 0.05793222039937973,
|
9594 |
+
"learning_rate": 2.028731562255304e-06,
|
9595 |
+
"loss": 10.3236,
|
9596 |
+
"step": 1365
|
9597 |
+
},
|
9598 |
+
{
|
9599 |
+
"epoch": 0.5785071466384331,
|
9600 |
+
"grad_norm": 0.03534723445773125,
|
9601 |
+
"learning_rate": 1.985481665716882e-06,
|
9602 |
+
"loss": 10.3293,
|
9603 |
+
"step": 1366
|
9604 |
+
},
|
9605 |
+
{
|
9606 |
+
"epoch": 0.5789306511381683,
|
9607 |
+
"grad_norm": 0.0394221730530262,
|
9608 |
+
"learning_rate": 1.9426931429069706e-06,
|
9609 |
+
"loss": 10.3271,
|
9610 |
+
"step": 1367
|
9611 |
+
},
|
9612 |
+
{
|
9613 |
+
"epoch": 0.5793541556379036,
|
9614 |
+
"grad_norm": 0.04738519713282585,
|
9615 |
+
"learning_rate": 1.9003661952396224e-06,
|
9616 |
+
"loss": 10.3337,
|
9617 |
+
"step": 1368
|
9618 |
+
},
|
9619 |
+
{
|
9620 |
+
"epoch": 0.579777660137639,
|
9621 |
+
"grad_norm": 0.04297514259815216,
|
9622 |
+
"learning_rate": 1.8585010219561273e-06,
|
9623 |
+
"loss": 10.3315,
|
9624 |
+
"step": 1369
|
9625 |
+
},
|
9626 |
+
{
|
9627 |
+
"epoch": 0.5802011646373743,
|
9628 |
+
"grad_norm": 0.05005791783332825,
|
9629 |
+
"learning_rate": 1.8170978201241474e-06,
|
9630 |
+
"loss": 10.3296,
|
9631 |
+
"step": 1370
|
9632 |
+
},
|
9633 |
+
{
|
9634 |
+
"epoch": 0.5806246691371095,
|
9635 |
+
"grad_norm": 0.055756572633981705,
|
9636 |
+
"learning_rate": 1.776156784636762e-06,
|
9637 |
+
"loss": 10.3288,
|
9638 |
+
"step": 1371
|
9639 |
+
},
|
9640 |
+
{
|
9641 |
+
"epoch": 0.5810481736368449,
|
9642 |
+
"grad_norm": 0.04473468288779259,
|
9643 |
+
"learning_rate": 1.7356781082115026e-06,
|
9644 |
+
"loss": 10.3314,
|
9645 |
+
"step": 1372
|
9646 |
+
},
|
9647 |
+
{
|
9648 |
+
"epoch": 0.5814716781365802,
|
9649 |
+
"grad_norm": 0.034649983048439026,
|
9650 |
+
"learning_rate": 1.695661981389518e-06,
|
9651 |
+
"loss": 10.3329,
|
9652 |
+
"step": 1373
|
9653 |
+
},
|
9654 |
+
{
|
9655 |
+
"epoch": 0.5818951826363155,
|
9656 |
+
"grad_norm": 0.04096787050366402,
|
9657 |
+
"learning_rate": 1.6561085925346332e-06,
|
9658 |
+
"loss": 10.3295,
|
9659 |
+
"step": 1374
|
9660 |
+
},
|
9661 |
+
{
|
9662 |
+
"epoch": 0.5823186871360508,
|
9663 |
+
"grad_norm": 0.052380774170160294,
|
9664 |
+
"learning_rate": 1.6170181278324815e-06,
|
9665 |
+
"loss": 10.33,
|
9666 |
+
"step": 1375
|
9667 |
+
},
|
9668 |
+
{
|
9669 |
+
"epoch": 0.5827421916357861,
|
9670 |
+
"grad_norm": 0.041889987885951996,
|
9671 |
+
"learning_rate": 1.5783907712896062e-06,
|
9672 |
+
"loss": 10.3283,
|
9673 |
+
"step": 1376
|
9674 |
+
},
|
9675 |
+
{
|
9676 |
+
"epoch": 0.5831656961355215,
|
9677 |
+
"grad_norm": 0.040269892662763596,
|
9678 |
+
"learning_rate": 1.5402267047325947e-06,
|
9679 |
+
"loss": 10.3309,
|
9680 |
+
"step": 1377
|
9681 |
+
},
|
9682 |
+
{
|
9683 |
+
"epoch": 0.5835892006352568,
|
9684 |
+
"grad_norm": 0.03972173109650612,
|
9685 |
+
"learning_rate": 1.5025261078073005e-06,
|
9686 |
+
"loss": 10.3327,
|
9687 |
+
"step": 1378
|
9688 |
+
},
|
9689 |
+
{
|
9690 |
+
"epoch": 0.584012705134992,
|
9691 |
+
"grad_norm": 0.042261023074388504,
|
9692 |
+
"learning_rate": 1.465289157977867e-06,
|
9693 |
+
"loss": 10.3286,
|
9694 |
+
"step": 1379
|
9695 |
+
},
|
9696 |
+
{
|
9697 |
+
"epoch": 0.5844362096347274,
|
9698 |
+
"grad_norm": 0.04388967156410217,
|
9699 |
+
"learning_rate": 1.4285160305259836e-06,
|
9700 |
+
"loss": 10.3273,
|
9701 |
+
"step": 1380
|
9702 |
+
},
|
9703 |
+
{
|
9704 |
+
"epoch": 0.5848597141344627,
|
9705 |
+
"grad_norm": 0.05625876411795616,
|
9706 |
+
"learning_rate": 1.3922068985500414e-06,
|
9707 |
+
"loss": 10.3231,
|
9708 |
+
"step": 1381
|
9709 |
+
},
|
9710 |
+
{
|
9711 |
+
"epoch": 0.585283218634198,
|
9712 |
+
"grad_norm": 0.039795175194740295,
|
9713 |
+
"learning_rate": 1.3563619329643119e-06,
|
9714 |
+
"loss": 10.3295,
|
9715 |
+
"step": 1382
|
9716 |
+
},
|
9717 |
+
{
|
9718 |
+
"epoch": 0.5857067231339333,
|
9719 |
+
"grad_norm": 0.033558834344148636,
|
9720 |
+
"learning_rate": 1.3209813024981255e-06,
|
9721 |
+
"loss": 10.3284,
|
9722 |
+
"step": 1383
|
9723 |
+
},
|
9724 |
+
{
|
9725 |
+
"epoch": 0.5861302276336686,
|
9726 |
+
"grad_norm": 0.034402813762426376,
|
9727 |
+
"learning_rate": 1.286065173695128e-06,
|
9728 |
+
"loss": 10.3296,
|
9729 |
+
"step": 1384
|
9730 |
+
},
|
9731 |
+
{
|
9732 |
+
"epoch": 0.5865537321334039,
|
9733 |
+
"grad_norm": 0.038992416113615036,
|
9734 |
+
"learning_rate": 1.251613710912425e-06,
|
9735 |
+
"loss": 10.3325,
|
9736 |
+
"step": 1385
|
9737 |
+
},
|
9738 |
+
{
|
9739 |
+
"epoch": 0.5869772366331393,
|
9740 |
+
"grad_norm": 0.04365801811218262,
|
9741 |
+
"learning_rate": 1.2176270763198828e-06,
|
9742 |
+
"loss": 10.3289,
|
9743 |
+
"step": 1386
|
9744 |
+
},
|
9745 |
+
{
|
9746 |
+
"epoch": 0.5874007411328745,
|
9747 |
+
"grad_norm": 0.04500606283545494,
|
9748 |
+
"learning_rate": 1.184105429899318e-06,
|
9749 |
+
"loss": 10.3268,
|
9750 |
+
"step": 1387
|
9751 |
+
},
|
9752 |
+
{
|
9753 |
+
"epoch": 0.5878242456326098,
|
9754 |
+
"grad_norm": 0.033053088933229446,
|
9755 |
+
"learning_rate": 1.1510489294437432e-06,
|
9756 |
+
"loss": 10.3259,
|
9757 |
+
"step": 1388
|
9758 |
+
},
|
9759 |
+
{
|
9760 |
+
"epoch": 0.5882477501323452,
|
9761 |
+
"grad_norm": 0.04001301899552345,
|
9762 |
+
"learning_rate": 1.1184577305566323e-06,
|
9763 |
+
"loss": 10.3314,
|
9764 |
+
"step": 1389
|
9765 |
+
},
|
9766 |
+
{
|
9767 |
+
"epoch": 0.5886712546320805,
|
9768 |
+
"grad_norm": 0.042015451937913895,
|
9769 |
+
"learning_rate": 1.0863319866512346e-06,
|
9770 |
+
"loss": 10.3268,
|
9771 |
+
"step": 1390
|
9772 |
+
},
|
9773 |
+
{
|
9774 |
+
"epoch": 0.5890947591318157,
|
9775 |
+
"grad_norm": 0.0645761638879776,
|
9776 |
+
"learning_rate": 1.054671848949751e-06,
|
9777 |
+
"loss": 10.3235,
|
9778 |
+
"step": 1391
|
9779 |
+
},
|
9780 |
+
{
|
9781 |
+
"epoch": 0.5895182636315511,
|
9782 |
+
"grad_norm": 0.03458908945322037,
|
9783 |
+
"learning_rate": 1.0234774664827474e-06,
|
9784 |
+
"loss": 10.3268,
|
9785 |
+
"step": 1392
|
9786 |
+
},
|
9787 |
+
{
|
9788 |
+
"epoch": 0.5899417681312864,
|
9789 |
+
"grad_norm": 0.030142279341816902,
|
9790 |
+
"learning_rate": 9.92748986088321e-07,
|
9791 |
+
"loss": 10.3292,
|
9792 |
+
"step": 1393
|
9793 |
+
},
|
9794 |
+
{
|
9795 |
+
"epoch": 0.5903652726310217,
|
9796 |
+
"grad_norm": 0.03560309112071991,
|
9797 |
+
"learning_rate": 9.624865524115346e-07,
|
9798 |
+
"loss": 10.3252,
|
9799 |
+
"step": 1394
|
9800 |
+
},
|
9801 |
+
{
|
9802 |
+
"epoch": 0.5907887771307571,
|
9803 |
+
"grad_norm": 0.0334932841360569,
|
9804 |
+
"learning_rate": 9.326903079036275e-07,
|
9805 |
+
"loss": 10.3344,
|
9806 |
+
"step": 1395
|
9807 |
+
},
|
9808 |
+
{
|
9809 |
+
"epoch": 0.5912122816304923,
|
9810 |
+
"grad_norm": 0.035931240767240524,
|
9811 |
+
"learning_rate": 9.033603928214395e-07,
|
9812 |
+
"loss": 10.3315,
|
9813 |
+
"step": 1396
|
9814 |
+
},
|
9815 |
+
{
|
9816 |
+
"epoch": 0.5916357861302276,
|
9817 |
+
"grad_norm": 0.03553161397576332,
|
9818 |
+
"learning_rate": 8.74496945226666e-07,
|
9819 |
+
"loss": 10.3264,
|
9820 |
+
"step": 1397
|
9821 |
+
},
|
9822 |
+
{
|
9823 |
+
"epoch": 0.592059290629963,
|
9824 |
+
"grad_norm": 0.04423438757658005,
|
9825 |
+
"learning_rate": 8.461001009852809e-07,
|
9826 |
+
"loss": 10.331,
|
9827 |
+
"step": 1398
|
9828 |
+
},
|
9829 |
+
{
|
9830 |
+
"epoch": 0.5924827951296983,
|
9831 |
+
"grad_norm": 0.04461585730314255,
|
9832 |
+
"learning_rate": 8.181699937668374e-07,
|
9833 |
+
"loss": 10.3315,
|
9834 |
+
"step": 1399
|
9835 |
+
},
|
9836 |
+
{
|
9837 |
+
"epoch": 0.5929062996294335,
|
9838 |
+
"grad_norm": 0.06595886498689651,
|
9839 |
+
"learning_rate": 7.907067550438685e-07,
|
9840 |
+
"loss": 10.3247,
|
9841 |
+
"step": 1400
|
9842 |
+
},
|
9843 |
+
{
|
9844 |
+
"epoch": 0.5933298041291689,
|
9845 |
+
"grad_norm": 0.03853926435112953,
|
9846 |
+
"learning_rate": 7.63710514091287e-07,
|
9847 |
+
"loss": 10.328,
|
9848 |
+
"step": 1401
|
9849 |
+
},
|
9850 |
+
{
|
9851 |
+
"epoch": 0.5937533086289042,
|
9852 |
+
"grad_norm": 0.05035213753581047,
|
9853 |
+
"learning_rate": 7.371813979857312e-07,
|
9854 |
+
"loss": 10.3258,
|
9855 |
+
"step": 1402
|
9856 |
+
},
|
9857 |
+
{
|
9858 |
+
"epoch": 0.5941768131286395,
|
9859 |
+
"grad_norm": 0.04313475638628006,
|
9860 |
+
"learning_rate": 7.111195316049979e-07,
|
9861 |
+
"loss": 10.329,
|
9862 |
+
"step": 1403
|
9863 |
+
},
|
9864 |
+
{
|
9865 |
+
"epoch": 0.5946003176283748,
|
9866 |
+
"grad_norm": 0.03996354341506958,
|
9867 |
+
"learning_rate": 6.855250376274547e-07,
|
9868 |
+
"loss": 10.3299,
|
9869 |
+
"step": 1404
|
9870 |
+
},
|
9871 |
+
{
|
9872 |
+
"epoch": 0.5950238221281101,
|
9873 |
+
"grad_norm": 0.03343300521373749,
|
9874 |
+
"learning_rate": 6.603980365314399e-07,
|
9875 |
+
"loss": 10.3302,
|
9876 |
+
"step": 1405
|
9877 |
+
},
|
9878 |
+
{
|
9879 |
+
"epoch": 0.5954473266278454,
|
9880 |
+
"grad_norm": 0.04570624977350235,
|
9881 |
+
"learning_rate": 6.357386465947301e-07,
|
9882 |
+
"loss": 10.3271,
|
9883 |
+
"step": 1406
|
9884 |
+
},
|
9885 |
+
{
|
9886 |
+
"epoch": 0.5958708311275808,
|
9887 |
+
"grad_norm": 0.037157222628593445,
|
9888 |
+
"learning_rate": 6.115469838939513e-07,
|
9889 |
+
"loss": 10.3294,
|
9890 |
+
"step": 1407
|
9891 |
+
},
|
9892 |
+
{
|
9893 |
+
"epoch": 0.596294335627316,
|
9894 |
+
"grad_norm": 0.03819438815116882,
|
9895 |
+
"learning_rate": 5.878231623040242e-07,
|
9896 |
+
"loss": 10.3326,
|
9897 |
+
"step": 1408
|
9898 |
+
},
|
9899 |
+
{
|
9900 |
+
"epoch": 0.5967178401270513,
|
9901 |
+
"grad_norm": 0.03227882459759712,
|
9902 |
+
"learning_rate": 5.645672934976975e-07,
|
9903 |
+
"loss": 10.3282,
|
9904 |
+
"step": 1409
|
9905 |
+
},
|
9906 |
+
{
|
9907 |
+
"epoch": 0.5971413446267867,
|
9908 |
+
"grad_norm": 0.038912806659936905,
|
9909 |
+
"learning_rate": 5.417794869449377e-07,
|
9910 |
+
"loss": 10.3277,
|
9911 |
+
"step": 1410
|
9912 |
+
},
|
9913 |
+
{
|
9914 |
+
"epoch": 0.597564849126522,
|
9915 |
+
"grad_norm": 0.03794252499938011,
|
9916 |
+
"learning_rate": 5.194598499124514e-07,
|
9917 |
+
"loss": 10.3305,
|
9918 |
+
"step": 1411
|
9919 |
+
},
|
9920 |
+
{
|
9921 |
+
"epoch": 0.5979883536262572,
|
9922 |
+
"grad_norm": 0.03023924119770527,
|
9923 |
+
"learning_rate": 4.97608487463197e-07,
|
9924 |
+
"loss": 10.325,
|
9925 |
+
"step": 1412
|
9926 |
+
},
|
9927 |
+
{
|
9928 |
+
"epoch": 0.5984118581259926,
|
9929 |
+
"grad_norm": 0.03531888127326965,
|
9930 |
+
"learning_rate": 4.762255024558515e-07,
|
9931 |
+
"loss": 10.3255,
|
9932 |
+
"step": 1413
|
9933 |
+
},
|
9934 |
+
{
|
9935 |
+
"epoch": 0.5988353626257279,
|
9936 |
+
"grad_norm": 0.04441700130701065,
|
9937 |
+
"learning_rate": 4.5531099554435576e-07,
|
9938 |
+
"loss": 10.3261,
|
9939 |
+
"step": 1414
|
9940 |
+
},
|
9941 |
+
{
|
9942 |
+
"epoch": 0.5992588671254632,
|
9943 |
+
"grad_norm": 0.042937856167554855,
|
9944 |
+
"learning_rate": 4.3486506517743664e-07,
|
9945 |
+
"loss": 10.3296,
|
9946 |
+
"step": 1415
|
9947 |
+
},
|
9948 |
+
{
|
9949 |
+
"epoch": 0.5996823716251986,
|
9950 |
+
"grad_norm": 0.04423859715461731,
|
9951 |
+
"learning_rate": 4.1488780759812995e-07,
|
9952 |
+
"loss": 10.3287,
|
9953 |
+
"step": 1416
|
9954 |
+
},
|
9955 |
+
{
|
9956 |
+
"epoch": 0.6001058761249338,
|
9957 |
+
"grad_norm": 0.0638049766421318,
|
9958 |
+
"learning_rate": 3.9537931684333617e-07,
|
9959 |
+
"loss": 10.3295,
|
9960 |
+
"step": 1417
|
9961 |
+
},
|
9962 |
+
{
|
9963 |
+
"epoch": 0.6005293806246691,
|
9964 |
+
"grad_norm": 0.0391959622502327,
|
9965 |
+
"learning_rate": 3.763396847433875e-07,
|
9966 |
+
"loss": 10.3311,
|
9967 |
+
"step": 1418
|
9968 |
+
},
|
9969 |
+
{
|
9970 |
+
"epoch": 0.6009528851244045,
|
9971 |
+
"grad_norm": 0.03398942947387695,
|
9972 |
+
"learning_rate": 3.5776900092159284e-07,
|
9973 |
+
"loss": 10.3331,
|
9974 |
+
"step": 1419
|
9975 |
+
},
|
9976 |
+
{
|
9977 |
+
"epoch": 0.6013763896241398,
|
9978 |
+
"grad_norm": 0.04590810090303421,
|
9979 |
+
"learning_rate": 3.3966735279384875e-07,
|
9980 |
+
"loss": 10.329,
|
9981 |
+
"step": 1420
|
9982 |
+
},
|
9983 |
+
{
|
9984 |
+
"epoch": 0.601799894123875,
|
9985 |
+
"grad_norm": 0.05416841059923172,
|
9986 |
+
"learning_rate": 3.220348255681738e-07,
|
9987 |
+
"loss": 10.3258,
|
9988 |
+
"step": 1421
|
9989 |
+
},
|
9990 |
+
{
|
9991 |
+
"epoch": 0.6022233986236104,
|
9992 |
+
"grad_norm": 0.041575055569410324,
|
9993 |
+
"learning_rate": 3.048715022443749e-07,
|
9994 |
+
"loss": 10.3251,
|
9995 |
+
"step": 1422
|
9996 |
+
},
|
9997 |
+
{
|
9998 |
+
"epoch": 0.6026469031233457,
|
9999 |
+
"grad_norm": 0.035289980471134186,
|
10000 |
+
"learning_rate": 2.881774636136036e-07,
|
10001 |
+
"loss": 10.3278,
|
10002 |
+
"step": 1423
|
10003 |
+
},
|
10004 |
+
{
|
10005 |
+
"epoch": 0.603070407623081,
|
10006 |
+
"grad_norm": 0.02908286452293396,
|
10007 |
+
"learning_rate": 2.7195278825801195e-07,
|
10008 |
+
"loss": 10.3303,
|
10009 |
+
"step": 1424
|
10010 |
+
},
|
10011 |
+
{
|
10012 |
+
"epoch": 0.6034939121228163,
|
10013 |
+
"grad_norm": 0.05550592392683029,
|
10014 |
+
"learning_rate": 2.561975525503524e-07,
|
10015 |
+
"loss": 10.3252,
|
10016 |
+
"step": 1425
|
10017 |
+
},
|
10018 |
+
{
|
10019 |
+
"epoch": 0.6039174166225516,
|
10020 |
+
"grad_norm": 0.037263430655002594,
|
10021 |
+
"learning_rate": 2.409118306536229e-07,
|
10022 |
+
"loss": 10.3271,
|
10023 |
+
"step": 1426
|
10024 |
+
},
|
10025 |
+
{
|
10026 |
+
"epoch": 0.6043409211222869,
|
10027 |
+
"grad_norm": 0.0358741395175457,
|
10028 |
+
"learning_rate": 2.2609569452076707e-07,
|
10029 |
+
"loss": 10.3285,
|
10030 |
+
"step": 1427
|
10031 |
+
},
|
10032 |
+
{
|
10033 |
+
"epoch": 0.6047644256220223,
|
10034 |
+
"grad_norm": 0.03653440624475479,
|
10035 |
+
"learning_rate": 2.117492138942412e-07,
|
10036 |
+
"loss": 10.3271,
|
10037 |
+
"step": 1428
|
10038 |
+
},
|
10039 |
+
{
|
10040 |
+
"epoch": 0.6051879301217575,
|
10041 |
+
"grad_norm": 0.03587844595313072,
|
10042 |
+
"learning_rate": 1.978724563057921e-07,
|
10043 |
+
"loss": 10.329,
|
10044 |
+
"step": 1429
|
10045 |
+
},
|
10046 |
+
{
|
10047 |
+
"epoch": 0.6056114346214928,
|
10048 |
+
"grad_norm": 0.03418637067079544,
|
10049 |
+
"learning_rate": 1.8446548707604648e-07,
|
10050 |
+
"loss": 10.3298,
|
10051 |
+
"step": 1430
|
10052 |
+
},
|
10053 |
+
{
|
10054 |
+
"epoch": 0.6060349391212282,
|
10055 |
+
"grad_norm": 0.04142700880765915,
|
10056 |
+
"learning_rate": 1.7152836931425554e-07,
|
10057 |
+
"loss": 10.3298,
|
10058 |
+
"step": 1431
|
10059 |
+
},
|
10060 |
+
{
|
10061 |
+
"epoch": 0.6064584436209635,
|
10062 |
+
"grad_norm": 0.03256484493613243,
|
10063 |
+
"learning_rate": 1.5906116391801728e-07,
|
10064 |
+
"loss": 10.3319,
|
10065 |
+
"step": 1432
|
10066 |
+
},
|
10067 |
+
{
|
10068 |
+
"epoch": 0.6068819481206987,
|
10069 |
+
"grad_norm": 0.038411945104599,
|
10070 |
+
"learning_rate": 1.4706392957292147e-07,
|
10071 |
+
"loss": 10.3247,
|
10072 |
+
"step": 1433
|
10073 |
+
},
|
10074 |
+
{
|
10075 |
+
"epoch": 0.6073054526204341,
|
10076 |
+
"grad_norm": 0.041494399309158325,
|
10077 |
+
"learning_rate": 1.3553672275230523e-07,
|
10078 |
+
"loss": 10.3246,
|
10079 |
+
"step": 1434
|
10080 |
+
},
|
10081 |
+
{
|
10082 |
+
"epoch": 0.6077289571201694,
|
10083 |
+
"grad_norm": 0.033497005701065063,
|
10084 |
+
"learning_rate": 1.244795977170421e-07,
|
10085 |
+
"loss": 10.3301,
|
10086 |
+
"step": 1435
|
10087 |
+
},
|
10088 |
+
{
|
10089 |
+
"epoch": 0.6081524616199047,
|
10090 |
+
"grad_norm": 0.039331309497356415,
|
10091 |
+
"learning_rate": 1.1389260651518685e-07,
|
10092 |
+
"loss": 10.3292,
|
10093 |
+
"step": 1436
|
10094 |
+
},
|
10095 |
+
{
|
10096 |
+
"epoch": 0.60857596611964,
|
10097 |
+
"grad_norm": 0.03892797604203224,
|
10098 |
+
"learning_rate": 1.037757989818089e-07,
|
10099 |
+
"loss": 10.3303,
|
10100 |
+
"step": 1437
|
10101 |
+
},
|
10102 |
+
{
|
10103 |
+
"epoch": 0.6089994706193753,
|
10104 |
+
"grad_norm": 0.04775160551071167,
|
10105 |
+
"learning_rate": 9.412922273871471e-08,
|
10106 |
+
"loss": 10.3313,
|
10107 |
+
"step": 1438
|
10108 |
+
},
|
10109 |
+
{
|
10110 |
+
"epoch": 0.6094229751191106,
|
10111 |
+
"grad_norm": 0.036971334367990494,
|
10112 |
+
"learning_rate": 8.495292319425918e-08,
|
10113 |
+
"loss": 10.3305,
|
10114 |
+
"step": 1439
|
10115 |
+
},
|
10116 |
+
{
|
10117 |
+
"epoch": 0.609846479618846,
|
10118 |
+
"grad_norm": 0.044706303626298904,
|
10119 |
+
"learning_rate": 7.624694354309014e-08,
|
10120 |
+
"loss": 10.3263,
|
10121 |
+
"step": 1440
|
10122 |
+
},
|
10123 |
+
{
|
10124 |
+
"epoch": 0.6102699841185812,
|
10125 |
+
"grad_norm": 0.04051651433110237,
|
10126 |
+
"learning_rate": 6.801132476598194e-08,
|
10127 |
+
"loss": 10.3252,
|
10128 |
+
"step": 1441
|
10129 |
+
},
|
10130 |
+
{
|
10131 |
+
"epoch": 0.6106934886183165,
|
10132 |
+
"grad_norm": 0.053238075226545334,
|
10133 |
+
"learning_rate": 6.024610562962441e-08,
|
10134 |
+
"loss": 10.33,
|
10135 |
+
"step": 1442
|
10136 |
+
},
|
10137 |
+
{
|
10138 |
+
"epoch": 0.6111169931180519,
|
10139 |
+
"grad_norm": 0.04244618862867355,
|
10140 |
+
"learning_rate": 5.2951322686434216e-08,
|
10141 |
+
"loss": 10.3264,
|
10142 |
+
"step": 1443
|
10143 |
+
},
|
10144 |
+
{
|
10145 |
+
"epoch": 0.6115404976177872,
|
10146 |
+
"grad_norm": 0.03795997425913811,
|
10147 |
+
"learning_rate": 4.6127010274399364e-08,
|
10148 |
+
"loss": 10.3282,
|
10149 |
+
"step": 1444
|
10150 |
+
},
|
10151 |
+
{
|
10152 |
+
"epoch": 0.6119640021175226,
|
10153 |
+
"grad_norm": 0.051705945283174515,
|
10154 |
+
"learning_rate": 3.977320051690159e-08,
|
10155 |
+
"loss": 10.3323,
|
10156 |
+
"step": 1445
|
10157 |
+
},
|
10158 |
+
{
|
10159 |
+
"epoch": 0.6123875066172578,
|
10160 |
+
"grad_norm": 0.04325444623827934,
|
10161 |
+
"learning_rate": 3.388992332259422e-08,
|
10162 |
+
"loss": 10.3286,
|
10163 |
+
"step": 1446
|
10164 |
+
},
|
10165 |
+
{
|
10166 |
+
"epoch": 0.6128110111169931,
|
10167 |
+
"grad_norm": 0.03884377330541611,
|
10168 |
+
"learning_rate": 2.8477206385213452e-08,
|
10169 |
+
"loss": 10.328,
|
10170 |
+
"step": 1447
|
10171 |
+
},
|
10172 |
+
{
|
10173 |
+
"epoch": 0.6132345156167285,
|
10174 |
+
"grad_norm": 0.03975790739059448,
|
10175 |
+
"learning_rate": 2.353507518350062e-08,
|
10176 |
+
"loss": 10.3322,
|
10177 |
+
"step": 1448
|
10178 |
+
},
|
10179 |
+
{
|
10180 |
+
"epoch": 0.6136580201164638,
|
10181 |
+
"grad_norm": 0.04308043792843819,
|
10182 |
+
"learning_rate": 1.906355298103568e-08,
|
10183 |
+
"loss": 10.3277,
|
10184 |
+
"step": 1449
|
10185 |
+
},
|
10186 |
+
{
|
10187 |
+
"epoch": 0.614081524616199,
|
10188 |
+
"grad_norm": 0.03676469624042511,
|
10189 |
+
"learning_rate": 1.506266082615948e-08,
|
10190 |
+
"loss": 10.3288,
|
10191 |
+
"step": 1450
|
10192 |
+
},
|
10193 |
+
{
|
10194 |
+
"epoch": 0.6145050291159344,
|
10195 |
+
"grad_norm": 0.04436874762177467,
|
10196 |
+
"learning_rate": 1.1532417551873842e-08,
|
10197 |
+
"loss": 10.3308,
|
10198 |
+
"step": 1451
|
10199 |
+
},
|
10200 |
+
{
|
10201 |
+
"epoch": 0.6149285336156697,
|
10202 |
+
"grad_norm": 0.032175030559301376,
|
10203 |
+
"learning_rate": 8.472839775719443e-09,
|
10204 |
+
"loss": 10.329,
|
10205 |
+
"step": 1452
|
10206 |
+
},
|
10207 |
+
{
|
10208 |
+
"epoch": 0.615352038115405,
|
10209 |
+
"grad_norm": 0.05000218376517296,
|
10210 |
+
"learning_rate": 5.883941899731404e-09,
|
10211 |
+
"loss": 10.3264,
|
10212 |
+
"step": 1453
|
10213 |
+
},
|
10214 |
+
{
|
10215 |
+
"epoch": 0.6157755426151403,
|
10216 |
+
"grad_norm": 0.04700994864106178,
|
10217 |
+
"learning_rate": 3.7657361103837776e-09,
|
10218 |
+
"loss": 10.3328,
|
10219 |
+
"step": 1454
|
10220 |
+
},
|
10221 |
+
{
|
10222 |
+
"epoch": 0.6161990471148756,
|
10223 |
+
"grad_norm": 0.1858830451965332,
|
10224 |
+
"learning_rate": 2.118232378467422e-09,
|
10225 |
+
"loss": 10.3369,
|
10226 |
+
"step": 1455
|
10227 |
+
},
|
10228 |
+
{
|
10229 |
+
"epoch": 0.6166225516146109,
|
10230 |
+
"grad_norm": 0.03489432856440544,
|
10231 |
+
"learning_rate": 9.414384591233116e-10,
|
10232 |
+
"loss": 10.3226,
|
10233 |
+
"step": 1456
|
10234 |
+
},
|
10235 |
+
{
|
10236 |
+
"epoch": 0.6170460561143463,
|
10237 |
+
"grad_norm": 0.03102676011621952,
|
10238 |
+
"learning_rate": 2.3535989175371697e-10,
|
10239 |
+
"loss": 10.3309,
|
10240 |
+
"step": 1457
|
10241 |
+
},
|
10242 |
+
{
|
10243 |
+
"epoch": 0.6174695606140815,
|
10244 |
+
"grad_norm": 0.0351453572511673,
|
10245 |
+
"learning_rate": 0.0,
|
10246 |
+
"loss": 10.3298,
|
10247 |
+
"step": 1458
|
10248 |
}
|
10249 |
],
|
10250 |
"logging_steps": 1,
10259 |         "should_evaluate": false,
10260 |         "should_log": false,
10261 |         "should_save": true,
10262 | +        "should_training_stop": true
10263 |       },
10264 |       "attributes": {}
10265 |     }
10266 |   },
10267 | +  "total_flos": 31296030474240.0,
10268 |   "train_batch_size": 2,
10269 |   "trial_name": null,
10270 |   "trial_params": null