greedy-intersection / config.json
{
  "architectures": [
    "GreedyModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_greedy.GreedyConfig",
    "AutoModelForCausalLM": "modeling_greedy.GreedyModel"
  },
  "eos_token_id": 8,
  "pad_token_id": 9,
  "reciprocals": null,
  "reducables": null,
  "torch_dtype": "float32",
  "transformers_version": "4.21.1",
  "vocab_size": 10
}
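
Because auto_map points at custom classes (configuration_greedy.GreedyConfig and modeling_greedy.GreedyModel) that live in the repository rather than in the transformers library, the checkpoint has to be loaded with trust_remote_code=True. Below is a minimal sketch of how that could look; the hub id "kibrq/greedy-intersection" is assumed from the page path and may differ from the actual repository id.

# Minimal loading sketch (assumptions: hub id "kibrq/greedy-intersection").
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "kibrq/greedy-intersection"  # assumed hub id, adjust if needed

# trust_remote_code=True lets transformers import the custom
# GreedyConfig / GreedyModel classes referenced in auto_map.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

print(config.vocab_size)    # 10
print(config.eos_token_id)  # 8
print(config.pad_token_id)  # 9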