{
    "dim": 3072,
    "n_layers": 28,
    "n_heads": 24,
    "n_kv_heads": 8,
    "vocab_size": 128256,
    "ffn_dim_multiplier": 1.0,
    "multiple_of": 256,
    "norm_eps": 1e-05,
    "rope_theta": 500000.0,
    "use_scaled_rope": true,
    "quantization_args": {
        "group_size": 32
    },
    "lora_args": {
        "rank": 16,
        "scale": 2.0
    }
}
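
A minimal sketch of how this config.json might be loaded, assuming it is read as plain JSON; the dataclass and function names (ModelArgs, QuantizationArgs, LoraArgs, load_config) are hypothetical illustrations, not part of any specific library.

import json
from dataclasses import dataclass
from typing import Optional

@dataclass
class QuantizationArgs:
    group_size: int

@dataclass
class LoraArgs:
    rank: int
    scale: float

@dataclass
class ModelArgs:
    dim: int
    n_layers: int
    n_heads: int
    n_kv_heads: int
    vocab_size: int
    ffn_dim_multiplier: float
    multiple_of: int
    norm_eps: float
    rope_theta: float
    use_scaled_rope: bool
    quantization_args: Optional[QuantizationArgs] = None
    lora_args: Optional[LoraArgs] = None

def load_config(path: str) -> ModelArgs:
    # Read the raw JSON and map the nested dicts onto the dataclasses above.
    with open(path) as f:
        raw = json.load(f)
    quant = raw.pop("quantization_args", None)
    lora = raw.pop("lora_args", None)
    return ModelArgs(
        **raw,
        quantization_args=QuantizationArgs(**quant) if quant else None,
        lora_args=LoraArgs(**lora) if lora else None,
    )

if __name__ == "__main__":
    cfg = load_config("config.json")
    head_dim = cfg.dim // cfg.n_heads  # 3072 / 24 = 128
    print(cfg.n_layers, head_dim, cfg.quantization_args, cfg.lora_args)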