rakhman-llm committed (verified)
Commit a184528 · 1 Parent(s): 74a6ce2

Training in progress, step 6000, checkpoint

last-checkpoint/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:683c6b844253214d5535f32fb10d971f05c91a8acc294539eaa07b28c3f16d01
+oid sha256:eb3011d25fd85a465cf3e0ce6a9bcc74aa9316f6611f88f06f23c17f5dd203b1
 size 891558696
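Each binary file in this commit is stored as a Git LFS pointer: the repository only tracks the version / oid sha256 / size triplet, and the new oid above identifies the step-6000 weights. Below is a minimal sketch of checking a locally pulled copy against that pointer; the local path and the use of hashlib are illustrative and not part of the commit.

    # Sketch: verify a pulled LFS object against the pointer shown above.
    # Assumes `git lfs pull` has replaced the pointer text with the real file.
    import hashlib
    import os

    path = "last-checkpoint/model.safetensors"  # illustrative local path
    expected_oid = "eb3011d25fd85a465cf3e0ce6a9bcc74aa9316f6611f88f06f23c17f5dd203b1"
    expected_size = 891558696

    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert os.path.getsize(path) == expected_size, "size mismatch"
    assert h.hexdigest() == expected_oid, "sha256 mismatch"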
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5e606906707907082dc8c6184eedbdf1ad6410646fdfb29ec0957ff6b4fea464
+oid sha256:8080d252b62a6ceb80eef3556d6b8ddb39639a5625e1d88dd9cc1e6ce9ba8af8
 size 1783272762
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cd16ada593cf0a36458682125206d81fb88aab5b155751c1343bff1314ca90c7
+oid sha256:e3e29c5cf029c43a0b9eb6818343605a4d54765fc02cea34588573e488f22ffd
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:33ec81ecd0f74edad60756c692726503a5467d4d5a63f6abe2c6e32cac418cdc
+oid sha256:260e81eea4566c115fd02bd1907dd188f736d221da2bd52f057ef75265ae93af
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 2.3747841105354057,
+  "epoch": 2.5906735751295336,
   "eval_steps": 500,
-  "global_step": 5500,
+  "global_step": 6000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -39,9 +39,9 @@
     {
       "epoch": 1.0,
       "eval_loss": 0.33670297265052795,
-      "eval_runtime": 28.3351,
-      "eval_samples_per_second": 16.658,
-      "eval_steps_per_second": 4.164,
+      "eval_runtime": 28.2248,
+      "eval_samples_per_second": 16.723,
+      "eval_steps_per_second": 4.181,
       "step": 2316
     },
     {
@@ -82,9 +82,9 @@
     {
       "epoch": 2.0,
       "eval_loss": 0.3095574975013733,
-      "eval_runtime": 28.3225,
-      "eval_samples_per_second": 16.665,
-      "eval_steps_per_second": 4.166,
+      "eval_runtime": 28.2419,
+      "eval_samples_per_second": 16.713,
+      "eval_steps_per_second": 4.178,
       "step": 4632
     },
     {
@@ -100,6 +100,13 @@
       "learning_rate": 4.173862982153138e-06,
       "loss": 0.3513,
       "step": 5500
+    },
+    {
+      "epoch": 2.5906735751295336,
+      "grad_norm": 0.5765931606292725,
+      "learning_rate": 2.734599884858952e-06,
+      "loss": 0.3428,
+      "step": 6000
     }
   ],
   "logging_steps": 500,
@@ -119,7 +126,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 1.339585567653888e+16,
+  "total_flos": 1.461377145765888e+16,
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null
last-checkpoint/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0589186e917e79226308cded95843db5013776fee27e83a39d678ba03ec762d3
+oid sha256:d2d71d788b491a41c54d3b024285748be4a2e443d732e96343f48dd9941e304a
 size 5432
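Once the LFS objects are pulled, the step-6000 weights committed above can be opened directly with the safetensors library; a minimal sketch (the parameter-count printout is illustrative only and says nothing about the model architecture):

    # Sketch: load the step-6000 weights from the checkpoint and report their size.
    # Assumes the LFS objects have been pulled so the file holds real tensors.
    from safetensors.torch import load_file

    state_dict = load_file("last-checkpoint/model.safetensors")
    n_params = sum(t.numel() for t in state_dict.values())
    print(f"{len(state_dict)} tensors, {n_params:,} parameters")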