maher4488 committed
Commit 22d2cf8 · verified · 1 Parent(s): 4d56416

Outer Step 2. Inner Step 6740. Batch Size 392

Files changed (3):
  1. config.json +11 -6
  2. inner_optimizer.pt +3 -0
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,18 +268,23 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5129056,
-    5129069,
-    5129080,
-    5129092,
-    5129098
+    5129179,
+    5129184,
+    5129188,
+    5129193,
+    5129212,
+    5129222,
+    5129230,
+    5129242,
+    5129252,
+    5129268
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 6735,
+  "inner_step": 6740,
   "inner_steps": 0,
   "last_allreduce_block": 5063053,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53e40decf0fd7c9d7b812127868ce71d0aa2dd1f2f2062205766db80f5ba146b
+size 8081781770
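inner_optimizer.pt is stored through Git LFS, so what is committed here is a three-line pointer: the oid is the SHA-256 of the real ~8 GB optimizer state and size is its byte count. A small standard-library sketch for checking a downloaded copy against this pointer (the local filename is an assumption):

```python
import hashlib
import os

path = "inner_optimizer.pt"  # hypothetical local copy of the LFS object

# Git LFS pointers record the SHA-256 ("oid") and byte size of the real file;
# recompute both to confirm the download matches this commit's pointer.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == "53e40decf0fd7c9d7b812127868ce71d0aa2dd1f2f2062205766db80f5ba146b"
assert os.path.getsize(path) == 8081781770
```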
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e60512e4abeff6962033e8beb0b03270d55ea046111a1820a3b98ff3939862d0
+oid sha256:fa0f72b38f4f100e19fc97a50e8cf51318dfd47a04fa4d0b2a60e950b356c430
 size 4040701744
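model.safetensors keeps the same size (4,040,701,744 bytes) and only its content hash changes, consistent with an in-place weight update between inner steps. Since config.json maps AutoModelForCausalLM to the repo's custom GPTOptim class, loading the checkpoint as of this commit would presumably look like the sketch below; trust_remote_code is needed for the custom modeling code, and pinning revision to this commit hash is an assumption about how you would fetch it:

```python
from transformers import AutoModelForCausalLM

# Fetch the checkpoint at this commit; trust_remote_code lets transformers
# import the GPTOptim class referenced in config.json's auto_map.
model = AutoModelForCausalLM.from_pretrained(
    "distributed/optimized-gpt2-500m",
    revision="22d2cf8",
    trust_remote_code=True,
)
```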