tensor-tech committed (verified)
Commit c98ab1c · 1 Parent(s): e31013b

Run 4. Outer Step 22. Inner Step 6.

Files changed (3)
  1. config.json +6 -6
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -268,18 +268,18 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-    5526775,
-    5526781,
-    5526787,
-    5526793,
-    5526799
+    5526806,
+    5526813,
+    5526818,
+    5526825,
+    5526831
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step": 5,
+  "inner_step": 6,
   "inner_steps": 0,
   "last_allreduce_block": 5523943,
   "layer_norm_epsilon": 1e-05,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:35a31d7f440ced604b67aaa4f787ab7fa6bd8f9663a116889aaf286dac4ff55e
+oid sha256:8bc2016f99bddcd2c00c112c41edb3b810b1156a9737f259378e50b0e738c164
 size 8081782026
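To inspect the optimizer state itself, note that a .pt file is typically a torch.save artifact; the sketch below assumes it deserializes to a plain dict on CPU (the payload layout and key names are not guaranteed by this commit, and at ~8 GB it needs ample RAM):

```python
import torch

# Load the inner optimizer checkpoint onto CPU for inspection.
state = torch.load("inner_optimizer.pt", map_location="cpu")

# Assuming a dict-like payload, list the top-level keys that were saved.
if isinstance(state, dict):
    print(list(state.keys()))
```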
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7d097d40cd656dda6c4d4ab963fd52c3444fd66c1d0c7681c6a271d6fad54a0b
+oid sha256:e471e8aacb6eab2a1539606c1bcdea90393128866f8201d4021fd0ff35a393ca
 size 4040701744
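Both binary files are tracked with Git LFS, so the diff only shows their pointer files (the version/oid/size lines follow the Git LFS pointer spec). After downloading the actual blobs, one way to confirm you have the payloads this commit points at is to hash each file and compare it against the pointer's oid. A minimal sketch, assuming the files have been downloaded next to the script:

```python
import hashlib

def lfs_sha256(path, chunk_size=1 << 20):
    """Stream a file and return its SHA-256 hex digest, the format used in LFS pointer oids."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oids taken from the new pointer files in this commit
expected = {
    "model.safetensors": "e471e8aacb6eab2a1539606c1bcdea90393128866f8201d4021fd0ff35a393ca",
    "inner_optimizer.pt": "8bc2016f99bddcd2c00c112c41edb3b810b1156a9737f259378e50b0e738c164",
}

for name, oid in expected.items():
    assert lfs_sha256(name) == oid, f"{name} does not match the pointer oid"
```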