Update tokenizer_config.json
Browse files- tokenizer_config.json +1 -1
tokenizer_config.json
CHANGED
@@ -149,7 +149,7 @@
|
|
149 |
"extra_special_tokens": {},
|
150 |
"model_max_length": 2048,
|
151 |
"pad_token": "<empty_output>",
|
152 |
-
"padding_side": "right",
|
153 |
"tokenizer_class": "GPT2Tokenizer",
|
154 |
"unk_token": "<|endoftext|>",
|
155 |
"vocab_size": 49152
|
|
|
149 |
"extra_special_tokens": {},
|
150 |
"model_max_length": 2048,
|
151 |
"pad_token": "<empty_output>",
|
152 |
+
"padding_side": "left",
|
153 |
"tokenizer_class": "GPT2Tokenizer",
|
154 |
"unk_token": "<|endoftext|>",
|
155 |
"vocab_size": 49152
|