{
  "bits": 4,
  "group_size": 32,
  "desc_act": true,
  "sym": true,
  "lm_head": false,
  "quant_method": "gptq",
  "checkpoint_format": "gptq",
  "pack_dtype": "int32",
  "meta": {
    "quantizer": [
      "gptqmodel:2.0.0"
    ],
    "uri": "https://github.com/modelcloud/gptqmodel",
    "damp_percent": 0.1,
    "damp_auto_increment": 0.0025,
    "static_groups": false,
    "true_sequential": true
  }
}
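
A minimal sketch of how the same settings might be produced with the gptqmodel library named in meta, assuming the gptqmodel v2.x Python API; the model id, calibration texts, and output path below are placeholders for illustration, not taken from the config file:

from gptqmodel import GPTQModel, QuantizeConfig

# Mirror the key settings from the config above.
quant_config = QuantizeConfig(
    bits=4,            # "bits": 4 -> 4-bit weight quantization
    group_size=32,     # "group_size": 32
    desc_act=True,     # "desc_act": true (activation-order quantization)
    sym=True,          # "sym": true (symmetric quantization)
    damp_percent=0.1,  # "meta.damp_percent": 0.1 (Hessian dampening)
)

# Placeholder model id and calibration set.
model = GPTQModel.load("facebook/opt-125m", quant_config)
model.quantize(["sample calibration text", "another calibration sample"])
model.save("opt-125m-gptq-4bit")

When the saved checkpoint is loaded later (e.g. via GPTQModel.load on the output directory), this config file is read from the checkpoint, so the quantization parameters do not need to be specified again at inference time.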