Update config.json
config.json CHANGED (+0 -4)
@@ -5,8 +5,6 @@
   ],
   "bos_token_id": 1,
   "conv_kernel": 4,
-  "d_inner": 160,
-  "d_model": 2048,
   "eos_token_id": 2,
   "expand": 2,
   "fused_add_norm": true,
@@ -18,10 +16,8 @@
   "model_type": "mamba",
   "n_layer": 48,
   "num_hidden_layers": 32,
-  "pad_vocab_size_multiple": 8,
   "residual_in_fp32": true,
   "rms_norm": true,
-  "ssm_cfg": {},
   "state_size": 16,
   "time_step_rank": 128,
   "torch_dtype": "float16",
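The removed keys (d_inner, d_model, pad_vocab_size_multiple, ssm_cfg) look like fields from the original mamba_ssm checkpoint format; presumably the cleanup aligns config.json with the schema that transformers' MambaConfig expects. A minimal sketch of a sanity check after this change, assuming a local checkout of the repo (the "." path is hypothetical):

# Load the cleaned config with transformers and confirm the values
# that survive the diff. AutoConfig reads config.json, sees
# model_type "mamba", and instantiates a MambaConfig.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")
print(config.model_type)         # "mamba"
print(config.conv_kernel)        # 4
print(config.state_size)         # 16
print(config.expand)             # 2
print(config.num_hidden_layers)  # 32
print(config.time_step_rank)     # 128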