lmz committed on
Commit
5e474db
·
verified ·
1 Parent(s): b34416f

Update config.toml

Browse files
Files changed (1) hide show
  1. config.toml +11 -10
config.toml CHANGED
@@ -1,18 +1,18 @@
1
  mimi_name = "[email protected]"
2
- moshi_name = "hibiki-rs-dc2cf5a5@80.safetensors"
3
  tokenizer_name = "tokenizer_spm_48k_multi6_2.model"
4
 
5
  [model]
6
  text_in_vocab_size = 48001
7
  text_out_vocab_size = 48000
8
  audio_vocab_size = 2049
9
- audio_codebooks = 16
10
 
11
  [model.transformer]
12
- d_model = 2048
13
- num_heads = 16
14
- num_layers = 16
15
- dim_feedforward = 8192
16
  causal = true
17
  norm_first = true
18
  bias_ff = false
@@ -30,18 +30,19 @@ kv_repeat = 1
30
  max_seq_len = 4096
31
 
32
  [model.depformer]
33
- num_slices = 8
 
34
 
35
  [model.depformer.transformer]
36
  d_model = 1024
37
  num_heads = 16
38
- num_layers = 6
39
- dim_feedforward = 4096
40
  causal = true
41
  norm_first = true
42
  bias_ff = false
43
  bias_attn = false
44
- context = 32
45
  max_period = 10000
46
  use_conv_block = false
47
  use_conv_bias = true
 
1
  mimi_name = "[email protected]"
2
+ moshi_name = "hibiki-rs-220b12c0@200.safetensors"
3
  tokenizer_name = "tokenizer_spm_48k_multi6_2.model"
4
 
5
  [model]
6
  text_in_vocab_size = 48001
7
  text_out_vocab_size = 48000
8
  audio_vocab_size = 2049
9
+ audio_codebooks = 32
10
 
11
  [model.transformer]
12
+ d_model = 2560
13
+ num_heads = 20
14
+ num_layers = 24
15
+ dim_feedforward = 10240
16
  causal = true
17
  norm_first = true
18
  bias_ff = false
 
30
  max_seq_len = 4096
31
 
32
  [model.depformer]
33
+ num_slices = 16
34
+ low_rank_embeddings = 128
35
 
36
  [model.depformer.transformer]
37
  d_model = 1024
38
  num_heads = 16
39
+ num_layers = 4
40
+ dim_feedforward = 3072
41
  causal = true
42
  norm_first = true
43
  bias_ff = false
44
  bias_attn = false
45
+ context = 16
46
  max_period = 10000
47
  use_conv_block = false
48
  use_conv_bias = true