lite-whisper-tiny-fast / config.json
{
"_name_or_path": "efficient-speech/lite-whisper-tiny-fast",
"activation_dropout": 0.0,
"activation_function": "gelu",
"apply_spec_augment": false,
"architectures": [
"LiteWhisperForConditionalGeneration"
],
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "configuration_lite_whisper.LiteWhisperConfig",
"AutoModel": "modeling_lite_whisper.LiteWhisperForConditionalGeneration"
},
"begin_suppress_tokens": null,
"bos_token_id": 50257,
"classifier_proj_size": 256,
"d_model": 384,
"decoder_attention_heads": 6,
"decoder_ffn_dim": 1536,
"decoder_layerdrop": 0.0,
"decoder_layers": 4,
"decoder_start_token_id": 50258,
"dropout": 0.0,
"encoder_attention_heads": 6,
"encoder_ffn_dim": 1536,
"encoder_layerdrop": 0.0,
"encoder_layers": 4,
"eos_token_id": 50257,
"forced_decoder_ids": [
[
1,
50259
],
[
2,
50359
],
[
3,
50363
]
],
"init_std": 0.02,
"is_encoder_decoder": true,
"low_rank_config": [
{
"fc1": 240,
"k_proj": 80,
"out_proj": 144,
"q_proj": 96,
"v_proj": 144
},
{
"fc1": 288,
"fc2": 304,
"k_proj": 112,
"out_proj": 192,
"q_proj": 160
},
{
"fc1": 256,
"fc2": 128,
"k_proj": 64,
"q_proj": 112,
"v_proj": 192
},
{
"fc1": 304,
"fc2": 304,
"k_proj": 144,
"q_proj": 176
}
],
"mask_feature_length": 10,
"mask_feature_min_masks": 0,
"mask_feature_prob": 0.0,
"mask_time_length": 10,
"mask_time_min_masks": 2,
"mask_time_prob": 0.05,
"max_length": null,
"max_source_positions": 1500,
"max_target_positions": 448,
"median_filter_width": 7,
"model_type": "lite-whisper",
"num_hidden_layers": 4,
"num_mel_bins": 80,
"pad_token_id": 50257,
"scale_embedding": false,
"torch_dtype": "float32",
"transformers_version": "4.49.0",
"use_cache": true,
"use_weighted_layer_sum": false,
"vocab_size": 51865
}
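
The "auto_map" entries above point at custom modeling code shipped with the repository, so the checkpoint has to be loaded with trust_remote_code=True. A minimal loading sketch, assuming the standard transformers custom-code path; the repo id comes from "_name_or_path" in this config, and reusing the base openai/whisper-tiny processor is an assumption, not something stated in this file:

import torch
from transformers import AutoModel, AutoProcessor

# trust_remote_code=True is required because auto_map routes to the
# LiteWhisperConfig / LiteWhisperForConditionalGeneration classes in this repo.
model = AutoModel.from_pretrained(
    "efficient-speech/lite-whisper-tiny-fast",
    trust_remote_code=True,
    torch_dtype=torch.float32,  # matches "torch_dtype" above
)
processor = AutoProcessor.from_pretrained("openai/whisper-tiny")  # assumed base processor

Each dict in "low_rank_config" gives per-matrix ranks for one of the four encoder layers; a missing key (e.g. no "fc2" in the first layer) presumably means that matrix is kept dense. With d_model = 384, a rank-r factorization replaces one 384x384 weight with a 384xr and an rx384 pair. An illustrative parameter-count comparison only, not the actual LiteWhisper module layout:

import torch.nn as nn

d_model = 384
r = 96  # rank listed for "q_proj" in low_rank_config[0]

dense = nn.Linear(d_model, d_model)      # 384*384 + 384 = 147,840 params
low_rank = nn.Sequential(                # hypothetical factorized replacement
    nn.Linear(d_model, r, bias=False),   # 384*96          =  36,864 params
    nn.Linear(r, d_model),               # 96*384 + 384    =  37,248 params
)                                        # total: 74,112, roughly half the dense count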