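# Pretrained assets: Mimi audio codec weights, the main transformer checkpoint,
# and the SentencePiece text tokenizer.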
mimi_name = "[email protected]"
moshi_name = "[email protected]"
tokenizer_name = "tokenizer_spm_48k_multi6_2.model"
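
# Token spaces: text input/output vocabularies and the per-codebook audio
# vocabulary (likely the codec's 2048 entries plus one reserved token),
# spread over 32 audio codebooks.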
[model]
text_in_vocab_size = 48001
text_out_vocab_size = 48000
audio_vocab_size = 2049
audio_codebooks = 32
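
# Main temporal transformer backbone: a causal, pre-norm decoder with RMSNorm,
# SiLU-gated feed-forward layers, rotary position embeddings, and a 500-step
# attention context.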
[model.transformer]
d_model = 2560
num_heads = 20
num_layers = 24
dim_feedforward = 10240
causal = true
norm_first = true
bias_ff = false
bias_attn = false
context = 500
max_period = 100000
use_conv_block = false
use_conv_bias = true
gating = "silu"
norm = "RmsNorm"
positional_embedding = "Rope"
conv_layout = false
conv_kernel_size = 3
kv_repeat = 1
max_seq_len = 4096
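
# Depth transformer ("depformer"): a smaller transformer that runs across the
# codebook dimension within each time step; num_slices is how many audio
# codebooks it predicts per step, and low_rank_embeddings is the bottleneck
# dimension of its input embeddings.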
[model.depformer]
num_slices = 16
low_rank_embeddings = 128
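
# Hyperparameters of the depth transformer itself: note the short context of 16
# (matching the number of slices) and the absence of positional embeddings.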
[model.depformer.transformer]
d_model = 1024
num_heads = 16
num_layers = 4
dim_feedforward = 3072
causal = true
norm_first = true
bias_ff = false
bias_attn = false
context = 16
max_period = 10000
use_conv_block = false
use_conv_bias = true
gating = "silu"
norm = "RmsNorm"
positional_embedding = "None"
conv_layout = false
conv_kernel_size = 3
kv_repeat = 1
max_seq_len = 4096
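
# "description" conditioner: a lookup-table (Lut) embedding over a small set of
# discrete labels, here quality ratings from "very_bad" to "very_good".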
[model.conditioners.description]
type = "Lut"
n_bins = 31
dim = 16
possible_values = ["very_bad", "bad", "neutral", "good", "very_good"]