asahi417 committed on
Commit dae39b9 · 1 Parent(s): d6c42a3
Files changed (2)
  1. config.json +231 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,231 @@
+ {
+   "_name_or_path": "tner_ckpt/btc_roberta_large/best_model",
+   "architectures": [
+     "RobertaForTokenClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "bos_token_id": 0,
+   "classifier_dropout": null,
+   "crf_state_dict": {
+     "_constraint_mask": [
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         1.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         1.0
+       ],
+       [
+         1.0,
+         1.0,
+         1.0,
+         0.0,
+         0.0,
+         0.0,
+         1.0,
+         0.0,
+         0.0
+       ],
+       [
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0,
+         0.0
+       ]
+     ],
+     "end_transitions": [
+       -1.6918301582336426,
+       0.83443683385849,
+       -1.369484543800354,
+       0.9246144890785217,
+       1.069233775138855,
+       -0.16902348399162292,
+       -1.1555670499801636
+     ],
+     "start_transitions": [
+       0.508945107460022,
+       0.8599593043327332,
+       -0.7935769557952881,
+       2.3224387168884277,
+       1.7772325277328491,
+       -0.14362987875938416,
+       1.1923292875289917
+     ],
+     "transitions": [
+       [
+         -0.16531139612197876,
+         -0.44062453508377075,
+         -0.17101362347602844,
+         0.25618258118629456,
+         0.8218539953231812,
+         -0.6791337132453918,
+         -0.18991276621818542
+       ],
+       [
+         0.049584291875362396,
+         -0.029006613418459892,
+         0.3484196066856384,
+         0.10896176099777222,
+         0.252639502286911,
+         -0.29233261942863464,
+         -0.09638770669698715
+       ],
+       [
+         -0.38768860697746277,
+         0.08189011365175247,
+         -0.310952365398407,
+         -0.09067512303590775,
+         -0.34503042697906494,
+         0.36938661336898804,
+         0.2102288007736206
+       ],
+       [
+         0.25750938057899475,
+         0.8332298398017883,
+         -0.7139731049537659,
+         -0.2698363959789276,
+         -0.2478555291891098,
+         -0.15140868723392487,
+         -0.09971318393945694
+       ],
+       [
+         -0.08811905235052109,
+         0.08423032611608505,
+         -0.09703858941793442,
+         0.8387375473976135,
+         0.055684782564640045,
+         0.14125464856624603,
+         -0.33670008182525635
+       ],
+       [
+         0.2373524159193039,
+         0.07985689491033554,
+         0.02303706295788288,
+         0.3609643578529358,
+         0.3878211975097656,
+         -0.557222306728363,
+         -0.34269899129867554
+       ],
+       [
+         -0.26117709279060364,
+         0.45397937297821045,
+         1.2661653757095337,
+         0.4104919135570526,
+         0.13623365759849548,
+         -0.05334918946027756,
+         -0.017508944496512413
+       ]
+     ]
+   },
+   "eos_token_id": 2,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "B-LOC",
+     "1": "B-ORG",
+     "2": "B-PER",
+     "3": "I-LOC",
+     "4": "I-ORG",
+     "5": "I-PER",
+     "6": "O"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "B-LOC": 0,
+     "B-ORG": 1,
+     "B-PER": 2,
+     "I-LOC": 3,
+     "I-ORG": 4,
+     "I-PER": 5,
+     "O": 6
+   },
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 514,
+   "model_type": "roberta",
+   "num_attention_heads": 16,
+   "num_hidden_layers": 24,
+   "pad_token_id": 1,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.20.1",
+   "type_vocab_size": 1,
+   "use_cache": true,
+   "vocab_size": 50265
+ }
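
For orientation (not part of the commit), a minimal sketch of inspecting this config with the transformers library once the files are downloaded locally; "path/to/checkpoint" is a placeholder, and the note that crf_state_dict is consumed by the tner library is an assumption based on the "_name_or_path" above.

from transformers import AutoConfig

# Placeholder path to a local copy of config.json / pytorch_model.bin (assumption).
config = AutoConfig.from_pretrained("path/to/checkpoint")

# The label schema declared above: BIO tags over LOC / ORG / PER plus "O".
print(config.id2label)  # {0: 'B-LOC', 1: 'B-ORG', ..., 6: 'O'}

# Keys that transformers does not recognise, such as crf_state_dict, are kept
# on the config object verbatim; a plain RobertaForTokenClassification ignores
# them, so CRF decoding presumably happens in the tner code that produced
# this checkpoint.
crf = config.crf_state_dict
print(len(crf["transitions"]))       # 7 x 7 label-to-label transition scores
print(len(crf["_constraint_mask"]))  # 9 x 9: 7 labels plus start and end states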
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d09155c297e6c1193d96398366bb54cbc734f09ea44c80aad6e320840c37409
+ size 1417400369
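
The pytorch_model.bin entry above is a Git LFS pointer, not the weights themselves: git-lfs resolves it to a roughly 1.4 GB binary whose SHA-256 must match the recorded oid. A minimal sketch (not part of the commit) of verifying a downloaded copy against the pointer, assuming the file sits in the current directory:

import hashlib
import os

path = "pytorch_model.bin"  # placeholder local path (assumption)
expected_oid = "7d09155c297e6c1193d96398366bb54cbc734f09ea44c80aad6e320840c37409"
expected_size = 1417400369

# The pointer records both size and SHA-256, so check both.
assert os.path.getsize(path) == expected_size

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert sha256.hexdigest() == expected_oid
print("pytorch_model.bin matches the LFS pointer")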