Add new SentenceTransformer model

Files changed:
- 1_Pooling/config.json (+10 -0)
- README.md (+566 -0)
- config.json (+47 -0)
- config_sentence_transformers.json (+10 -0)
- model.safetensors (+3 -0)
- modules.json (+14 -0)
- sentence_bert_config.json (+4 -0)
- special_tokens_map.json (+37 -0)
- tokenizer.json (+0 -0)
- tokenizer_config.json (+945 -0)

1_Pooling/config.json (ADDED)

```json
{
  "word_embedding_dimension": 1024,
  "pooling_mode_cls_token": true,
  "pooling_mode_mean_tokens": false,
  "pooling_mode_max_tokens": false,
  "pooling_mode_mean_sqrt_len_tokens": false,
  "pooling_mode_weightedmean_tokens": false,
  "pooling_mode_lasttoken": false,
  "include_prompt": true
}
```
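
With only `pooling_mode_cls_token` enabled, the pooling module reduces each sequence of token embeddings to the vector of the first ([CLS]) token. A minimal sketch of that operation in PyTorch (illustrative only, not the sentence-transformers internals):

```python
import torch

def cls_pool(token_embeddings: torch.Tensor) -> torch.Tensor:
    # token_embeddings: (batch, seq_len, 1024) hidden states from the backbone.
    # CLS pooling keeps only the first token's vector per sequence.
    return token_embeddings[:, 0]  # (batch, 1024)
```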

README.md (ADDED)

---
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:498970
- loss:BPRLoss
base_model: answerdotai/ModernBERT-large
widget:
- source_sentence: when was the world trade organization created
  sentences:
  - The Dark Side Of The WTO. The World Trade Organization (WTO) was created January
    1, 1995, and it has been a source of controversy ever since. The birth of the
    WTO was more of a continuation than a truly new creation.
  - Updated May 30, 2016. The Great Depression started in 1929. On March 25, the stock
    market had a correction. Many investors were wiped out, since they had borrowed
    money from their stockbrokers. When the market fell, the brokers called in their
    loans. That wiped out some businesses, including banks.
  - "A Preparatory Committee was established in February 1946, and met for the first
    time in London in October 1946 to work on the charter of an international organization
    for trade; the work was continued from April to November 1947.even rounds of
    negotiations occurred under GATT before the eighth round—the Uruguay
    Round —concluded in 1994 with the establishment of the World Trade Organization
    (WTO) as the GATT's replacement."
- source_sentence: 'where do you find the routing # on a check'
  sentences:
  - The easiest way to find your routing number is to look at your bank checks. It's
    easy to find the bank's ABA routing number and your account number as shown in
    the example check image below. The image is only for reference purposes. In some
    cases the order of the checking account number and check serial number is reversed.
    Search all BB&T NORTH CAROLINA routing numbers in the table below. Use the Search
    box to filter by city, state, address, routing number. Click on the routing number
    link in the table below to navigate to it and see all the information about it
    (address, telephone number, zip code, etc.).
  - 'Another way to locate your routing number is by looking at a check. The number
    on the lower left corner of your checks, as marked in this image, is the routing
    number for that account. You can also find your routing number on the WellsFargo.com
    website: wellsfargo.com/help/routing-number.'
  - "Your system literally dries out. Sometimes dehydration occurs for simple reasons:
    You don't drink enough because you're sick or busy, or because you lack access
    to safe drinking water when you're traveling, hiking or camping. Other dehydration
    causes include: 1 Diarrhea, vomiting.2 Severe, acute diarrhea — that
    is, diarrhea that comes on suddenly and violently — can cause a tremendous
    loss of water and electrolytes in a short amount of time. 3 If you have vomiting
    along with diarrhea, you lose even more fluids and minerals.ometimes dehydration
    occurs for simple reasons: You don't drink enough because you're sick or busy,
    or because you lack access to safe drinking water when you're traveling, hiking
    or camping. Other dehydration causes include: 1 Diarrhea, vomiting."
- source_sentence: average litter size for new zealand whites
  sentences:
  - 1 They come in several different colors (black, red and white) but the white rabbits
    are the most popular for meat production because of their large, broad, and muscular
    bodies. 2 When mature, bucks weigh from 8-10 pounds and females from 9-12 pounds.
    3 New Zealand rabbits are ready to slaughter as fryers after just 2 months.
  - Quoits is a traditional target throwing game in which rings are thrown at a target
    spike, and the objective is to get them as close as possible to the targets. The
    origin of the game is not very clear and there are theories that suggest the game
    originated from similar games played in ancient Greece.
  - When mature, bucks weigh from 8-10 pounds and females from 9-12 pounds. New Zealand
    rabbits are ready to slaughter as fryers after just 2 months. Their average litter
    size is 8-10 bunnies.
- source_sentence: what is parasitic infection
  sentences:
  - A parasitic disease is an infectious disease caused or transmitted by a parasite.
    Many parasites do not cause diseases. Parasitic diseases can affect practically
    all living organisms, including plants and mammals. The study of parasitic diseases
    is called parasitology.erminology [edit]. Although organisms such as bacteria
    function as parasites, the usage of the term parasitic disease is usually more
    restricted. The three main types of organisms causing these conditions are protozoa
    (causing protozoan infection), helminths (helminthiasis), and ectoparasites.
  - Episodes are broadcast on Sunday at 9:00 pm Eastern Time, and the episodes for
    season one to six are between 50 and 69 minutes in length. The first six seasons
    are available on DVD and Blu-ray. As of June 26, 2016, 60 episodes of Game of
    Thrones have aired, concluding the sixth season. The series was renewed for a
    seventh season in April 2016, which will consist of seven episodes and premiere
    on July 16, 2017. The series will conclude with its eighth season, which will
    consist of six episodes.
  - 'It is possible to get sepsis and not know you have an infection or the doctors
    might not be able to figure out what the infection was. All types of infections
    can cause sepsis: Bacterial. Viral. Fungal. Parasitic A bacterial infection is
    caused by bacteria, such as E. coli, or group A streptococcus.'
- source_sentence: what is the average top third score on the act
  sentences:
  - The average ACT score composite at Duke is a 34. The 25th percentile ACT score
    is 32, and the 75th percentile ACT score is 35. In other words, a 32 places you
    below average, while a 35 will move you up to above average.f you're a junior
    or senior, your GPA is hard to change from this point on. If your GPA is at or
    below the school average of 4.19, you'll need a higher ACT score to compensate
    and show that you're prepared to take on college academics.
  - North Dakota is among a dozen states where high school students are required to
    take the ACT before graduating. The state tied with Colorado for third with an
    average composite score of 20.6 this year. Utah was first with an average of 20.8
    and Illinois was second at 20.7. ACT composite scores range from 1 to 36. The
    national average is 21.0. A total of 7,227 students in North Dakota took the ACT
    this year.
  - Warning. Itchy upper back skin may be caused by an allergic reaction to medications.
    If you take medications, talk with your doctor. Drugs that commonly cause this
    issue include antibiotics, narcotic pain medications and antifungal medications,
    according to MayoClinic.com.
pipeline_tag: sentence-similarity
library_name: sentence-transformers
---

# SentenceTransformer based on answerdotai/ModernBERT-large

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [answerdotai/ModernBERT-large](https://huggingface.co/answerdotai/ModernBERT-large). It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [answerdotai/ModernBERT-large](https://huggingface.co/answerdotai/ModernBERT-large) <!-- at revision 45bb4654a4d5aaff24dd11d4781fa46d39bf8c13 -->
- **Maximum Sequence Length:** 8192 tokens
- **Output Dimensionality:** 1024 dimensions
- **Similarity Function:** Cosine Similarity
<!-- - **Training Dataset:** Unknown -->
<!-- - **Language:** Unknown -->
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: ModernBertModel
  (1): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
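
For orientation, the two modules above can be assembled by hand with the `sentence_transformers.models` API. A minimal sketch (an assumed construction that mirrors the checkpoint's `modules.json`, not the exact code used to create it):

```python
from sentence_transformers import SentenceTransformer, models

# Module 0: the ModernBERT backbone with its 8192-token context window.
word = models.Transformer("answerdotai/ModernBERT-large", max_seq_length=8192)
# Module 1: CLS pooling over the 1024-dim token embeddings.
pool = models.Pooling(word.get_word_embedding_dimension(), pooling_mode="cls")

model = SentenceTransformer(modules=[word, pool])
```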

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("BlackBeenie/ModernBERT-large-msmarco-bpr")
# Run inference
sentences = [
    'what is the average top third score on the act',
    'North Dakota is among a dozen states where high school students are required to take the ACT before graduating. The state tied with Colorado for third with an average composite score of 20.6 this year. Utah was first with an average of 20.8 and Illinois was second at 20.7. ACT composite scores range from 1 to 36. The national average is 21.0. A total of 7,227 students in North Dakota took the ACT this year.',
    "The average ACT score composite at Duke is a 34. The 25th percentile ACT score is 32, and the 75th percentile ACT score is 35. In other words, a 32 places you below average, while a 35 will move you up to above average.f you're a junior or senior, your GPA is hard to change from this point on. If your GPA is at or below the school average of 4.19, you'll need a higher ACT score to compensate and show that you're prepared to take on college academics.",
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 1024]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
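
Because the model was trained on MS MARCO-style (query, passage) triplets, semantic search is the natural downstream use. A small illustrative sketch with the same API (the corpus below is made up for the example):

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("BlackBeenie/ModernBERT-large-msmarco-bpr")

corpus = [
    "The World Trade Organization (WTO) was created January 1, 1995.",
    "New Zealand rabbits' average litter size is 8-10 bunnies.",
]
query = "when was the world trade organization created"

# Rank passages by cosine similarity to the query embedding.
scores = model.similarity(model.encode([query]), model.encode(corpus))
best = scores.argmax().item()
print(corpus[best])
```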

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Dataset

#### Unnamed Dataset

* Size: 498,970 training samples
* Columns: <code>sentence_0</code>, <code>sentence_1</code>, and <code>sentence_2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence_0 | sentence_1 | sentence_2 |
  |:--------|:-----------|:-----------|:-----------|
  | type    | string     | string     | string     |
  | details | <ul><li>min: 4 tokens</li><li>mean: 9.24 tokens</li><li>max: 27 tokens</li></ul> | <ul><li>min: 23 tokens</li><li>mean: 83.71 tokens</li><li>max: 279 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 79.72 tokens</li><li>max: 262 tokens</li></ul> |
* Samples:
  | sentence_0 | sentence_1 | sentence_2 |
  |:-----------|:-----------|:-----------|
  | <code>what is tongkat ali</code> | <code>Tongkat Ali is a very powerful herb that acts as a sex enhancer by naturally increasing the testosterone levels, and revitalizing sexual impotence, performance and pleasure. Tongkat Ali is also effective in building muscular volume & strength resulting to a healthy physique.</code> | <code>However, unlike tongkat ali extract, tongkat ali chipped root and root powder are not sterile. Thus, the raw consumption of root powder is not recommended. The traditional preparation in Indonesia and Malaysia is to boil chipped roots as a tea. A standard dosage would be 50 gram of chipped root per person per day.</code> |
  | <code>cost to install engineered hardwood flooring</code> | <code>Burton says his customers typically spend about $8 per square foot for engineered hardwood flooring; add an additional $2 per square foot for installation. Minion says consumers should expect to pay $7 to $12 per square foot for quality hardwood flooring. “If the homeowner buys the wood and you need somebody to install it, usually an installation goes for about $2 a square foot,” Bill LeBeau, owner of LeBeau's Hardwood Floors of Huntersville, North Carolina, says.</code> | <code>Installing hardwood flooring can cost between $9 and $12 per square foot, compared with about $3 to $5 per square foot for carpet—so some homeowners opt to install hardwood only in some rooms rather than throughout their home.However, carpet typically needs to be replaced if it becomes stained or worn out.ardwood flooring lasts longer than carpet, can be easier to keep clean and can be refinished. In the end, though, the decision about whether to install hardwood or carpeting in a bedroom should be based on your personal preference, at least if you intend to stay in the home for years.</code> |
  | <code>define pollute</code> | <code>pollutes; polluted; polluting. Learner's definition of POLLUTE. [+ object] : to make (land, water, air, etc.) dirty and not safe or suitable to use. Waste from the factory had polluted [=contaminated] the river. Miles of beaches were polluted by the oil spill. Car exhaust pollutes the air.</code> | <code>Definition of pollute written for English Language Learners from the Merriam-Webster Learner's Dictionary with audio pronunciations, usage examples, and count/noncount noun labels. Learner's Dictionary mobile search</code> |
* Loss: <code>beir.losses.bpr_loss.BPRLoss</code>

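The loss is the Bayesian Personalized Ranking objective over the (query, positive passage, hard negative) triplets in the columns sentence_0/1/2 above. An illustrative PyTorch rendering of the core objective (a sketch of the idea; the actual `beir` implementation may differ in details):

```python
import torch
import torch.nn.functional as F

def bpr_loss(q: torch.Tensor, pos: torch.Tensor, neg: torch.Tensor) -> torch.Tensor:
    """Bayesian Personalized Ranking over embedding triplets.

    q, pos, neg: (batch, dim) embeddings of sentence_0, sentence_1, sentence_2.
    """
    pos_scores = (q * pos).sum(dim=-1)  # similarity to the relevant passage
    neg_scores = (q * neg).sum(dim=-1)  # similarity to the hard negative
    # Maximize the log-probability that the positive outranks the negative.
    return -F.logsigmoid(pos_scores - neg_scores).mean()
```
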
### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `per_device_train_batch_size`: 32
- `per_device_eval_batch_size`: 32
- `num_train_epochs`: 5
- `fp16`: True
- `multi_dataset_batch_sampler`: round_robin

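For orientation, these settings would map onto the sentence-transformers v3 trainer roughly as follows (an assumed reconstruction, not the original training script; `output_dir` is a placeholder):

```python
from sentence_transformers.training_args import (
    MultiDatasetBatchSamplers,
    SentenceTransformerTrainingArguments,
)

args = SentenceTransformerTrainingArguments(
    output_dir="outputs/modernbert-large-msmarco-bpr",  # placeholder path
    eval_strategy="steps",
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    num_train_epochs=5,
    fp16=True,
    multi_dataset_batch_sampler=MultiDatasetBatchSamplers.ROUND_ROBIN,
)
```
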
#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 32
- `per_device_eval_batch_size`: 32
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 5e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1
- `num_train_epochs`: 5
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.0
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: True
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`:
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: round_robin

</details>

### Training Logs
<details><summary>Click to expand</summary>

| Epoch  | Step  | Training Loss |
|:------:|:-----:|:-------------:|
| 0.0321 | 500   | 1.517         |
| 0.0641 | 1000  | 0.355         |
| 0.0962 | 1500  | 0.3123        |
| 0.1283 | 2000  | 0.2916        |
| 0.1603 | 2500  | 0.2805        |
| 0.1924 | 3000  | 0.2782        |
| 0.2245 | 3500  | 0.2806        |
| 0.2565 | 4000  | 0.2831        |
| 0.2886 | 4500  | 0.2837        |
| 0.3207 | 5000  | 0.2603        |
| 0.3527 | 5500  | 0.2529        |
| 0.3848 | 6000  | 0.2681        |
| 0.4169 | 6500  | 0.2573        |
| 0.4489 | 7000  | 0.2678        |
| 0.4810 | 7500  | 0.2786        |
| 0.5131 | 8000  | 0.2559        |
| 0.5451 | 8500  | 0.2771        |
| 0.5772 | 9000  | 0.2807        |
| 0.6092 | 9500  | 0.2627        |
| 0.6413 | 10000 | 0.2536        |
| 0.6734 | 10500 | 0.2607        |
| 0.7054 | 11000 | 0.2578        |
| 0.7375 | 11500 | 0.2615        |
| 0.7696 | 12000 | 0.2624        |
| 0.8016 | 12500 | 0.2491        |
| 0.8337 | 13000 | 0.2487        |
| 0.8658 | 13500 | 0.2524        |
| 0.8978 | 14000 | 0.2465        |
| 0.9299 | 14500 | 0.2575        |
| 0.9620 | 15000 | 0.2412        |
| 0.9940 | 15500 | 0.2514        |
| 1.0    | 15593 | -             |
| 1.0261 | 16000 | 0.1599        |
| 1.0582 | 16500 | 0.1495        |
| 1.0902 | 17000 | 0.1494        |
| 1.1223 | 17500 | 0.1437        |
| 1.1544 | 18000 | 0.1541        |
| 1.1864 | 18500 | 0.1455        |
| 1.2185 | 19000 | 0.1424        |
| 1.2506 | 19500 | 0.1456        |
| 1.2826 | 20000 | 0.1552        |
| 1.3147 | 20500 | 0.1508        |
| 1.3468 | 21000 | 0.1474        |
| 1.3788 | 21500 | 0.1534        |
| 1.4109 | 22000 | 0.1505        |
| 1.4430 | 22500 | 0.149         |
| 1.4750 | 23000 | 0.1616        |
| 1.5071 | 23500 | 0.1528        |
| 1.5392 | 24000 | 0.1531        |
| 1.5712 | 24500 | 0.151         |
| 1.6033 | 25000 | 0.1666        |
| 1.6353 | 25500 | 0.153         |
| 1.6674 | 26000 | 0.1532        |
| 1.6995 | 26500 | 0.1614        |
| 1.7315 | 27000 | 0.1576        |
| 1.7636 | 27500 | 0.154         |
| 1.7957 | 28000 | 0.1597        |
| 1.8277 | 28500 | 0.1512        |
| 1.8598 | 29000 | 0.1652        |
| 1.8919 | 29500 | 0.151         |
| 1.9239 | 30000 | 0.1561        |
| 1.9560 | 30500 | 0.1508        |
| 1.9881 | 31000 | 0.1463        |
| 2.0    | 31186 | -             |
| 2.0201 | 31500 | 0.0999        |
| 2.0522 | 32000 | 0.0829        |
| 2.0843 | 32500 | 0.0799        |
| 2.1163 | 33000 | 0.0843        |
| 2.1484 | 33500 | 0.091         |
| 2.1805 | 34000 | 0.0843        |
| 2.2125 | 34500 | 0.092         |
| 2.2446 | 35000 | 0.0879        |
| 2.2767 | 35500 | 0.0914        |
| 2.3087 | 36000 | 0.092         |
| 2.3408 | 36500 | 0.101         |
| 2.3729 | 37000 | 0.1038        |
| 2.4049 | 37500 | 0.1084        |
| 2.4370 | 38000 | 0.0923        |
| 2.4691 | 38500 | 0.1083        |
| 2.5011 | 39000 | 0.0909        |
| 2.5332 | 39500 | 0.0918        |
| 2.5653 | 40000 | 0.101         |
| 2.5973 | 40500 | 0.0935        |
| 2.6294 | 41000 | 0.0858        |
| 2.6615 | 41500 | 0.0821        |
| 2.6935 | 42000 | 0.0755        |
| 2.7256 | 42500 | 0.0902        |
| 2.7576 | 43000 | 0.0906        |
| 2.7897 | 43500 | 0.089         |
| 2.8218 | 44000 | 0.088         |
| 2.8538 | 44500 | 0.0866        |
| 2.8859 | 45000 | 0.0914        |
| 2.9180 | 45500 | 0.0903        |
| 2.9500 | 46000 | 0.0903        |
| 2.9821 | 46500 | 0.0932        |
| 3.0    | 46779 | -             |
| 3.0142 | 47000 | 0.0724        |
| 3.0462 | 47500 | 0.0465        |
| 3.0783 | 48000 | 0.049         |
| 3.1104 | 48500 | 0.0458        |
| 3.1424 | 49000 | 0.0461        |
| 3.1745 | 49500 | 0.0456        |
| 3.2066 | 50000 | 0.0469        |
| 3.2386 | 50500 | 0.051         |
| 3.2707 | 51000 | 0.044         |
| 3.3028 | 51500 | 0.0551        |
| 3.3348 | 52000 | 0.0549        |
| 3.3669 | 52500 | 0.0539        |
| 3.3990 | 53000 | 0.0515        |
| 3.4310 | 53500 | 0.0544        |
| 3.4631 | 54000 | 0.044         |
| 3.4952 | 54500 | 0.0499        |
| 3.5272 | 55000 | 0.0557        |
| 3.5593 | 55500 | 0.0571        |
| 3.5914 | 56000 | 0.0673        |
| 3.6234 | 56500 | 0.0512        |
| 3.6555 | 57000 | 0.0474        |
| 3.6876 | 57500 | 0.049         |
| 3.7196 | 58000 | 0.0552        |
| 3.7517 | 58500 | 0.046         |
| 3.7837 | 59000 | 0.0488        |
| 3.8158 | 59500 | 0.0477        |
| 3.8479 | 60000 | 0.054         |
| 3.8799 | 60500 | 0.0595        |
| 3.9120 | 61000 | 0.0462        |
| 3.9441 | 61500 | 0.0472        |
| 3.9761 | 62000 | 0.0553        |
| 4.0    | 62372 | -             |
| 4.0082 | 62500 | 0.0438        |
| 4.0403 | 63000 | 0.0178        |
| 4.0723 | 63500 | 0.0187        |
| 4.1044 | 64000 | 0.0219        |
| 4.1365 | 64500 | 0.0254        |
| 4.1685 | 65000 | 0.0222        |
| 4.2006 | 65500 | 0.0229        |
| 4.2327 | 66000 | 0.0206        |
| 4.2647 | 66500 | 0.0195        |
| 4.2968 | 67000 | 0.0184        |
| 4.3289 | 67500 | 0.0224        |
| 4.3609 | 68000 | 0.019         |
| 4.3930 | 68500 | 0.0204        |
| 4.4251 | 69000 | 0.0187        |
| 4.4571 | 69500 | 0.0207        |
| 4.4892 | 70000 | 0.0215        |
| 4.5213 | 70500 | 0.0194        |
| 4.5533 | 71000 | 0.0206        |
| 4.5854 | 71500 | 0.0189        |
| 4.6175 | 72000 | 0.0222        |
| 4.6495 | 72500 | 0.0198        |
| 4.6816 | 73000 | 0.0199        |
| 4.7137 | 73500 | 0.0155        |
| 4.7457 | 74000 | 0.0185        |
| 4.7778 | 74500 | 0.0176        |
| 4.8099 | 75000 | 0.0181        |
| 4.8419 | 75500 | 0.0165        |
| 4.8740 | 76000 | 0.0204        |
| 4.9060 | 76500 | 0.0163        |
| 4.9381 | 77000 | 0.0154        |
| 4.9702 | 77500 | 0.0194        |
| 5.0    | 77965 | -             |

</details>

### Framework Versions
- Python: 3.11.11
- Sentence Transformers: 3.4.1
- Transformers: 4.48.2
- PyTorch: 2.5.1+cu124
- Accelerate: 1.3.0
- Datasets: 3.2.0
- Tokenizers: 0.21.0

## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->

config.json (ADDED)

```json
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertModel"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "reference_compile": true,
  "repad_logits_with_grad": false,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.2",
  "vocab_size": 50368
}
```

config_sentence_transformers.json (ADDED)

```json
{
  "__version__": {
    "sentence_transformers": "3.4.1",
    "transformers": "4.48.2",
    "pytorch": "2.5.1+cu124"
  },
  "prompts": {},
  "default_prompt_name": null,
  "similarity_fn_name": "cosine"
}
```

model.safetensors (ADDED, Git LFS pointer)

```
version https://git-lfs.github.com/spec/v1
oid sha256:097056134416352b46ef70fa45bb710f6b561bd9ac846c976e6b7ace14c8a20b
size 1579143688
```

modules.json (ADDED)

```json
[
  {
    "idx": 0,
    "name": "0",
    "path": "",
    "type": "sentence_transformers.models.Transformer"
  },
  {
    "idx": 1,
    "name": "1",
    "path": "1_Pooling",
    "type": "sentence_transformers.models.Pooling"
  }
]
```

sentence_bert_config.json (ADDED)

```json
{
  "max_seq_length": 8192,
  "do_lower_case": false
}
```

special_tokens_map.json (ADDED)

```json
{
  "cls_token": {
    "content": "[CLS]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "[MASK]",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "[PAD]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "[SEP]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "[UNK]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
```
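
As a quick sanity check on these mappings (the repo id is the one from the usage section above; per config.json, [CLS] is id 50281 and [SEP] is id 50282):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("BlackBeenie/ModernBERT-large-msmarco-bpr")
ids = tok("what is tongkat ali")["input_ids"]
print(ids[0], ids[-1])  # expected: 50281 ([CLS]) and 50282 ([SEP])
```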

tokenizer.json (ADDED)

The diff for this file is too large to render; see the raw file in the repository.

tokenizer_config.json (ADDED; 945 lines in full, excerpt cut off below partway through the added_tokens_decoder map)

```json
{
  "added_tokens_decoder": {
    "0": {"content": "|||IP_ADDRESS|||", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "1": {"content": "<|padding|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50254": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50255": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50256": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50257": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50258": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50259": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50260": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50261": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50262": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50263": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50264": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50265": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50266": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50267": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50268": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50269": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50270": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50271": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50272": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50273": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50274": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50275": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50276": {"content": " ", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50277": {"content": "|||EMAIL_ADDRESS|||", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50278": {"content": "|||PHONE_NUMBER|||", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50279": {"content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50280": {"content": "[UNK]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50281": {"content": "[CLS]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50282": {"content": "[SEP]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50283": {"content": "[PAD]", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50284": {"content": "[MASK]", "lstrip": true, "normalized": false, "rstrip": false, "single_word": false, "special": true},
    "50285": {"content": "[unused0]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50286": {"content": "[unused1]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50287": {"content": "[unused2]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50288": {"content": "[unused3]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50289": {"content": "[unused4]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50290": {"content": "[unused5]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50291": {"content": "[unused6]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50292": {"content": "[unused7]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50293": {"content": "[unused8]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50294": {"content": "[unused9]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50295": {"content": "[unused10]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50296": {"content": "[unused11]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50297": {"content": "[unused12]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50298": {"content": "[unused13]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50299": {"content": "[unused14]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50300": {"content": "[unused15]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50301": {"content": "[unused16]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50302": {"content": "[unused17]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50303": {"content": "[unused18]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50304": {"content": "[unused19]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50305": {"content": "[unused20]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50306": {"content": "[unused21]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50307": {"content": "[unused22]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50308": {"content": "[unused23]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50309": {"content": "[unused24]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50310": {"content": "[unused25]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50311": {"content": "[unused26]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50312": {"content": "[unused27]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50313": {"content": "[unused28]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50314": {"content": "[unused29]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50315": {"content": "[unused30]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50316": {"content": "[unused31]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50317": {"content": "[unused32]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50318": {"content": "[unused33]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50319": {"content": "[unused34]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50320": {"content": "[unused35]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50321": {"content": "[unused36]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50322": {"content": "[unused37]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50323": {"content": "[unused38]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50324": {"content": "[unused39]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50325": {"content": "[unused40]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50326": {"content": "[unused41]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50327": {"content": "[unused42]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50328": {"content": "[unused43]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50329": {"content": "[unused44]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50330": {"content": "[unused45]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50331": {"content": "[unused46]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50332": {"content": "[unused47]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50333": {"content": "[unused48]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50334": {"content": "[unused49]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50335": {"content": "[unused50]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50336": {"content": "[unused51]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50337": {"content": "[unused52]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50338": {"content": "[unused53]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50339": {"content": "[unused54]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50340": {"content": "[unused55]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50341": {"content": "[unused56]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50342": {"content": "[unused57]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50343": {"content": "[unused58]", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false},
    "50344": {"content": "[unused59]", "lstrip": false, "normalized": true,
```
"rstrip": false,
|
744 |
+
"single_word": false,
|
745 |
+
"special": false
|
746 |
+
},
|
747 |
+
"50345": {
|
748 |
+
"content": "[unused60]",
|
749 |
+
"lstrip": false,
|
750 |
+
"normalized": true,
|
751 |
+
"rstrip": false,
|
752 |
+
"single_word": false,
|
753 |
+
"special": false
|
754 |
+
},
|
755 |
+
"50346": {
|
756 |
+
"content": "[unused61]",
|
757 |
+
"lstrip": false,
|
758 |
+
"normalized": true,
|
759 |
+
"rstrip": false,
|
760 |
+
"single_word": false,
|
761 |
+
"special": false
|
762 |
+
},
|
763 |
+
"50347": {
|
764 |
+
"content": "[unused62]",
|
765 |
+
"lstrip": false,
|
766 |
+
"normalized": true,
|
767 |
+
"rstrip": false,
|
768 |
+
"single_word": false,
|
769 |
+
"special": false
|
770 |
+
},
|
771 |
+
"50348": {
|
772 |
+
"content": "[unused63]",
|
773 |
+
"lstrip": false,
|
774 |
+
"normalized": true,
|
775 |
+
"rstrip": false,
|
776 |
+
"single_word": false,
|
777 |
+
"special": false
|
778 |
+
},
|
779 |
+
"50349": {
|
780 |
+
"content": "[unused64]",
|
781 |
+
"lstrip": false,
|
782 |
+
"normalized": true,
|
783 |
+
"rstrip": false,
|
784 |
+
"single_word": false,
|
785 |
+
"special": false
|
786 |
+
},
|
787 |
+
"50350": {
|
788 |
+
"content": "[unused65]",
|
789 |
+
"lstrip": false,
|
790 |
+
"normalized": true,
|
791 |
+
"rstrip": false,
|
792 |
+
"single_word": false,
|
793 |
+
"special": false
|
794 |
+
},
|
795 |
+
"50351": {
|
796 |
+
"content": "[unused66]",
|
797 |
+
"lstrip": false,
|
798 |
+
"normalized": true,
|
799 |
+
"rstrip": false,
|
800 |
+
"single_word": false,
|
801 |
+
"special": false
|
802 |
+
},
|
803 |
+
"50352": {
|
804 |
+
"content": "[unused67]",
|
805 |
+
"lstrip": false,
|
806 |
+
"normalized": true,
|
807 |
+
"rstrip": false,
|
808 |
+
"single_word": false,
|
809 |
+
"special": false
|
810 |
+
},
|
811 |
+
"50353": {
|
812 |
+
"content": "[unused68]",
|
813 |
+
"lstrip": false,
|
814 |
+
"normalized": true,
|
815 |
+
"rstrip": false,
|
816 |
+
"single_word": false,
|
817 |
+
"special": false
|
818 |
+
},
|
819 |
+
"50354": {
|
820 |
+
"content": "[unused69]",
|
821 |
+
"lstrip": false,
|
822 |
+
"normalized": true,
|
823 |
+
"rstrip": false,
|
824 |
+
"single_word": false,
|
825 |
+
"special": false
|
826 |
+
},
|
827 |
+
"50355": {
|
828 |
+
"content": "[unused70]",
|
829 |
+
"lstrip": false,
|
830 |
+
"normalized": true,
|
831 |
+
"rstrip": false,
|
832 |
+
"single_word": false,
|
833 |
+
"special": false
|
834 |
+
},
|
835 |
+
"50356": {
|
836 |
+
"content": "[unused71]",
|
837 |
+
"lstrip": false,
|
838 |
+
"normalized": true,
|
839 |
+
"rstrip": false,
|
840 |
+
"single_word": false,
|
841 |
+
"special": false
|
842 |
+
},
|
843 |
+
"50357": {
|
844 |
+
"content": "[unused72]",
|
845 |
+
"lstrip": false,
|
846 |
+
"normalized": true,
|
847 |
+
"rstrip": false,
|
848 |
+
"single_word": false,
|
849 |
+
"special": false
|
850 |
+
},
|
851 |
+
"50358": {
|
852 |
+
"content": "[unused73]",
|
853 |
+
"lstrip": false,
|
854 |
+
"normalized": true,
|
855 |
+
"rstrip": false,
|
856 |
+
"single_word": false,
|
857 |
+
"special": false
|
858 |
+
},
|
859 |
+
"50359": {
|
860 |
+
"content": "[unused74]",
|
861 |
+
"lstrip": false,
|
862 |
+
"normalized": true,
|
863 |
+
"rstrip": false,
|
864 |
+
"single_word": false,
|
865 |
+
"special": false
|
866 |
+
},
|
867 |
+
"50360": {
|
868 |
+
"content": "[unused75]",
|
869 |
+
"lstrip": false,
|
870 |
+
"normalized": true,
|
871 |
+
"rstrip": false,
|
872 |
+
"single_word": false,
|
873 |
+
"special": false
|
874 |
+
},
|
875 |
+
"50361": {
|
876 |
+
"content": "[unused76]",
|
877 |
+
"lstrip": false,
|
878 |
+
"normalized": true,
|
879 |
+
"rstrip": false,
|
880 |
+
"single_word": false,
|
881 |
+
"special": false
|
882 |
+
},
|
883 |
+
"50362": {
|
884 |
+
"content": "[unused77]",
|
885 |
+
"lstrip": false,
|
886 |
+
"normalized": true,
|
887 |
+
"rstrip": false,
|
888 |
+
"single_word": false,
|
889 |
+
"special": false
|
890 |
+
},
|
891 |
+
"50363": {
|
892 |
+
"content": "[unused78]",
|
893 |
+
"lstrip": false,
|
894 |
+
"normalized": true,
|
895 |
+
"rstrip": false,
|
896 |
+
"single_word": false,
|
897 |
+
"special": false
|
898 |
+
},
|
899 |
+
"50364": {
|
900 |
+
"content": "[unused79]",
|
901 |
+
"lstrip": false,
|
902 |
+
"normalized": true,
|
903 |
+
"rstrip": false,
|
904 |
+
"single_word": false,
|
905 |
+
"special": false
|
906 |
+
},
|
907 |
+
"50365": {
|
908 |
+
"content": "[unused80]",
|
909 |
+
"lstrip": false,
|
910 |
+
"normalized": true,
|
911 |
+
"rstrip": false,
|
912 |
+
"single_word": false,
|
913 |
+
"special": false
|
914 |
+
},
|
915 |
+
"50366": {
|
916 |
+
"content": "[unused81]",
|
917 |
+
"lstrip": false,
|
918 |
+
"normalized": true,
|
919 |
+
"rstrip": false,
|
920 |
+
"single_word": false,
|
921 |
+
"special": false
|
922 |
+
},
|
923 |
+
"50367": {
|
924 |
+
"content": "[unused82]",
|
925 |
+
"lstrip": false,
|
926 |
+
"normalized": true,
|
927 |
+
"rstrip": false,
|
928 |
+
"single_word": false,
|
929 |
+
"special": false
|
930 |
+
}
|
931 |
+
},
|
932 |
+
"clean_up_tokenization_spaces": true,
|
933 |
+
"cls_token": "[CLS]",
|
934 |
+
"extra_special_tokens": {},
|
935 |
+
"mask_token": "[MASK]",
|
936 |
+
"model_input_names": [
|
937 |
+
"input_ids",
|
938 |
+
"attention_mask"
|
939 |
+
],
|
940 |
+
"model_max_length": 8192,
|
941 |
+
"pad_token": "[PAD]",
|
942 |
+
"sep_token": "[SEP]",
|
943 |
+
"tokenizer_class": "PreTrainedTokenizerFast",
|
944 |
+
"unk_token": "[UNK]"
|
945 |
+
}
|
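
These settings are what `transformers` reads back once the repository is loaded. As a minimal sketch (not part of this commit), the snippet below loads the tokenizer from a local checkout — `path/to/checkout` is a placeholder path — and checks that the special tokens, the 8192-token context window, and the non-special `[unusedNN]` slots come through as configured.

```python
# Minimal sketch, assuming a local clone of this repository at
# "path/to/checkout" (placeholder path, not part of the commit).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkout")

# Special tokens declared in tokenizer_config.json.
print(tokenizer.cls_token, tokenizer.sep_token, tokenizer.mask_token)  # [CLS] [SEP] [MASK]

# ModernBERT's long context window, set via "model_max_length".
print(tokenizer.model_max_length)  # 8192

# "[unused26]" is registered with "special": false, so it behaves like an
# ordinary vocabulary slot reserved for future use.
print(tokenizer.convert_tokens_to_ids("[unused26]"))  # 50311
```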