{
"architectures": [
"PAL_B_RM_opt"
],
"auto_map": {
"AutoConfig": "configuration_pal_b_rm.PAL_B_Config",
"AutoModel": "modeling_pal_b_rm.PAL_B_RM_opt"
},
"d_hid": 512,
"d_pref": 512,
"initializer_type": "gaussian",
"is_expectation_norm_init": false,
"is_gumbel_hard": null,
"is_temperature_learnable": false,
"k": 2,
"llm_name": "facebook/opt-350m",
"model_type": "facebook/opt",
"pref_learner_type": "angle",
"proj_arch": "mlp2-gelu-dropout0",
"sfx_temperature": 1.0,
"sfx_type": "softmax",
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"uids": [
"KZL1qeRzHNYSfDAuOctL1iyVV8WC5N",
"ZzGCcAhvqF0HnKxNsUjtJFadcZdyZj",
"p4Oh7rUGyLe1EpilJFWr9sPDpkO016",
"qo6WIyEh27cwAjWpA3Q60J7NaDxzQJ",
"zKV8BFGy60O0q7102ALF84S6Jo5i4q",
"i8YiBZlrYmlkkChr5b9BUKvDO6lR1d",
"M3icahkfAtC9CJrtKgQ7qvyZ5SD8wC",
"HNzkrs9geGu1YMMfZ5Qvdt0ZaCthfB",
"Jxv4hxfb9zTVa5nsMDFlnjSX5LZ8MK",
"UhQipwcpQmiGJmScocXOGOKyCBaFUg"
]
}
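
Minimal loading sketch (not part of the checkpoint itself): the "auto_map" entries above point at custom configuration_pal_b_rm / modeling_pal_b_rm code shipped with the repository, so transformers needs trust_remote_code=True to resolve PAL_B_Config and PAL_B_RM_opt. The repo id below is a hypothetical placeholder; substitute the actual checkpoint location.

    from transformers import AutoConfig, AutoModel

    # Hypothetical repo id; replace with the real hub path of this checkpoint.
    repo_id = "your-namespace/pal-b-rm-opt"

    # trust_remote_code=True allows transformers to import the custom classes
    # named in auto_map instead of a built-in architecture.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)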