Epoch 389
- config.json +2 -2
- model.safetensors +1 -1
config.json CHANGED
@@ -6,8 +6,8 @@
   ],
   "attn_pdrop": 0.1,
   "auto_map": {
-
-
+    "AutoConfig": "distributed/optimized-gpt2-250m--configuration_gpt_optimized.GPTOptimConfig",
+    "AutoModelForCausalLM": "distributed/optimized-gpt2-250m--modeling_gpt_optimized.GPTOptim"
   },
   "block_size": 1024,
   "bos_token_id": 50256,
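The new auto_map entries point transformers at the custom configuration and model classes shipped inside the repository, which is why loading this checkpoint requires trust_remote_code=True. A minimal loading sketch, assuming the Hub repo id is distributed/optimized-gpt2-250m as the class paths in the mapping suggest:

from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "distributed/optimized-gpt2-250m"  # assumed repo id, taken from the auto_map paths

# auto_map tells transformers which remote classes to import from the repo,
# so executing the repo's custom code must be explicitly allowed.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)            # resolves to GPTOptimConfig
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)   # resolves to GPTOptim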
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:71fbe27aacae6fdffe206fa11aa9e602f3f047e0d5148778131e4961f6bfcba9
 size 1016427344
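model.safetensors is stored through Git LFS, so the repository only tracks a pointer file: a version line, the sha256 of the actual object, and its size. This commit swaps in the new object hash while the size (1016427344 bytes) stays the same. A small sketch, with a hypothetical local path, for checking a downloaded weights file against this pointer:

import hashlib

EXPECTED_SHA256 = "71fbe27aacae6fdffe206fa11aa9e602f3f047e0d5148778131e4961f6bfcba9"
EXPECTED_SIZE = 1016427344  # bytes, from the pointer file

def matches_pointer(path="model.safetensors"):  # hypothetical local path
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # hash and count the file in 1 MiB chunks
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == EXPECTED_SHA256 and size == EXPECTED_SIZE

print(matches_pointer())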