{
  "_name_or_path": "airesearch/wangchanberta-base-wiki-newmm",
  "architectures": [
    "RobertaForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "ADVI",
    "1": "ADVN",
    "2": "ADVP",
    "3": "ADVS",
    "4": "CFQC",
    "5": "CLTV",
    "6": "CMTR",
    "7": "CMTR@PUNC",
    "8": "CNIT",
    "9": "CVBL",
    "10": "DCNM",
    "11": "DDAC",
    "12": "DDAN",
    "13": "DDAQ",
    "14": "DDBQ",
    "15": "DIAC",
    "16": "DIAQ",
    "17": "DIBQ",
    "18": "DONM",
    "19": "EAFF",
    "20": "EITT",
    "21": "FIXN",
    "22": "FIXV",
    "23": "JCMP",
    "24": "JCRG",
    "25": "JSBR",
    "26": "NCMN",
    "27": "NCNM",
    "28": "NEG",
    "29": "NLBL",
    "30": "NONM",
    "31": "NPRP",
    "32": "NTTL",
    "33": "PDMN",
    "34": "PNTR",
    "35": "PPRS",
    "36": "PREL",
    "37": "PUNC",
    "38": "RPRE",
    "39": "VACT",
    "40": "VATT",
    "41": "VSTA",
    "42": "XVAE",
    "43": "XVAM",
    "44": "XVBB",
    "45": "XVBM",
    "46": "XVMM"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "ADVI": 0,
    "ADVN": 1,
    "ADVP": 2,
    "ADVS": 3,
    "CFQC": 4,
    "CLTV": 5,
    "CMTR": 6,
    "CMTR@PUNC": 7,
    "CNIT": 8,
    "CVBL": 9,
    "DCNM": 10,
    "DDAC": 11,
    "DDAN": 12,
    "DDAQ": 13,
    "DDBQ": 14,
    "DIAC": 15,
    "DIAQ": 16,
    "DIBQ": 17,
    "DONM": 18,
    "EAFF": 19,
    "EITT": 20,
    "FIXN": 21,
    "FIXV": 22,
    "JCMP": 23,
    "JCRG": 24,
    "JSBR": 25,
    "NCMN": 26,
    "NCNM": 27,
    "NEG": 28,
    "NLBL": 29,
    "NONM": 30,
    "NPRP": 31,
    "NTTL": 32,
    "PDMN": 33,
    "PNTR": 34,
    "PPRS": 35,
    "PREL": 36,
    "PUNC": 37,
    "RPRE": 38,
    "VACT": 39,
    "VATT": 40,
    "VSTA": 41,
    "XVAE": 42,
    "XVAM": 43,
    "XVBB": 44,
    "XVBM": 45,
    "XVMM": 46
  },
  "layer_norm_eps": 1e-05,
  "mask_token_id": 4,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.30.1",
  "type_vocab_size": 1,
  "unk_token_id": 3,
  "use_cache": true,
  "vocab_size": 97982
}