File size: 212 Bytes
Commit: 8c3a8de
{
    "architectures": [
        "LlamaForCausalLM"
    ],
    "model_type": "llama",
    "torch_dtype": "float16",
    "transformers_version": "4.49.0",
    "use_cache": true,
    "vocab_size": 32000
}
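
This is a minimal config.json for a LLaMA-family causal language model: only six fields are stored, so every architecture field not listed here (hidden size, layer count, attention heads, and so on) falls back to the LlamaConfig defaults of the installed transformers version. Below is a minimal sketch of inspecting the file with the Hugging Face transformers library; the local filename "config.json" is an assumption for illustration.

# A minimal sketch: load the config above and read back its fields.
# The path "config.json" is assumed; adjust it to where the file lives.
from transformers import LlamaConfig

# Fields absent from the JSON (hidden_size, num_hidden_layers,
# num_attention_heads, ...) take the LlamaConfig defaults.
config = LlamaConfig.from_json_file("config.json")

print(config.architectures)  # ['LlamaForCausalLM']
print(config.model_type)     # llama
print(config.vocab_size)     # 32000
print(config.use_cache)      # True
print(config.torch_dtype)    # torch.float16

Note that torch_dtype only records the dtype the checkpoint weights were saved in; from_pretrained honors it only when called with torch_dtype="auto", otherwise the model is instantiated in the framework's default dtype.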