{
    "architecture": "WhisperEncoder",
    "dtype": "float16",
    "num_hidden_layers": 4,
    "num_attention_heads": 6,
    "hidden_size": 384,
    "max_position_embeddings": 1500,
    "has_position_embedding": true,
    "n_mels": 80,
    "vocab_size": 51864,
    "hidden_act": "gelu",
    "num_languages": 99,
    "quantization": {
        "quant_algo": null
    }
}