Tirthankar committed on
Commit
daf1741
·
1 Parent(s): 8050c4a

Upload tokenizer

Browse files
Files changed (3) hide show
  1. special_tokens_map.json +6 -0
  2. tokenizer_config.json +48 -0
  3. vocab.json +76 -0
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": true,
6
+ "normalized": false,
7
+ "rstrip": true,
8
+ "single_word": false,
9
+ "special": false
10
+ },
11
+ "1": {
12
+ "content": "[UNK]",
13
+ "lstrip": true,
14
+ "normalized": false,
15
+ "rstrip": true,
16
+ "single_word": false,
17
+ "special": false
18
+ },
19
+ "3": {
20
+ "content": "<s>",
21
+ "lstrip": true,
22
+ "normalized": false,
23
+ "rstrip": true,
24
+ "single_word": false,
25
+ "special": false
26
+ },
27
+ "4": {
28
+ "content": "</s>",
29
+ "lstrip": true,
30
+ "normalized": false,
31
+ "rstrip": true,
32
+ "single_word": false,
33
+ "special": false
34
+ }
35
+ },
36
+ "bos_token": "<s>",
37
+ "clean_up_tokenization_spaces": true,
38
+ "do_lower_case": false,
39
+ "eos_token": "</s>",
40
+ "model_max_length": 1000000000000000019884624838656,
41
+ "pad_token": "[PAD]",
42
+ "processor_class": "Wav2Vec2ProcessorWithLM",
43
+ "replace_word_delimiter_char": " ",
44
+ "target_lang": null,
45
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
46
+ "unk_token": "[UNK]",
47
+ "word_delimiter_token": "|"
48
+ }
vocab.json ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "</s>": 4,
3
+ "<s>": 3,
4
+ "[PAD]": 0,
5
+ "[UNK]": 1,
6
+ "|": 2,
7
+ "،": 11,
8
+ "؟": 16,
9
+ "ؠ": 73,
10
+ "آ": 33,
11
+ "أ": 42,
12
+ "ؤ": 22,
13
+ "إ": 31,
14
+ "ا": 59,
15
+ "ب": 71,
16
+ "ت": 47,
17
+ "ث": 46,
18
+ "ج": 53,
19
+ "ح": 49,
20
+ "خ": 65,
21
+ "د": 23,
22
+ "ذ": 36,
23
+ "ر": 39,
24
+ "ز": 58,
25
+ "س": 32,
26
+ "ش": 24,
27
+ "ص": 63,
28
+ "ض": 27,
29
+ "ط": 61,
30
+ "ظ": 6,
31
+ "ع": 44,
32
+ "غ": 35,
33
+ "ف": 41,
34
+ "ق": 68,
35
+ "ل": 38,
36
+ "م": 17,
37
+ "ن": 60,
38
+ "و": 28,
39
+ "ً": 30,
40
+ "َ": 54,
41
+ "ُ": 18,
42
+ "ِ": 70,
43
+ "ّ": 19,
44
+ "ْ": 10,
45
+ "ٓ": 66,
46
+ "ٔ": 7,
47
+ "ٕ": 21,
48
+ "ٖ": 8,
49
+ "ٗ": 29,
50
+ "ٚ": 72,
51
+ "ٟ": 15,
52
+ "ٮ": 62,
53
+ "ٲ": 64,
54
+ "ٹ": 14,
55
+ "پ": 5,
56
+ "چ": 57,
57
+ "ڈ": 69,
58
+ "ڑ": 55,
59
+ "ژ": 20,
60
+ "ک": 34,
61
+ "گ": 48,
62
+ "ں": 52,
63
+ "ھ": 50,
64
+ "ہ": 40,
65
+ "ۄ": 43,
66
+ "ۆ": 37,
67
+ "ی": 25,
68
+ "ے": 9,
69
+ "۔": 12,
70
+ "۱": 26,
71
+ "۵": 51,
72
+ "۷": 45,
73
+ "۸": 67,
74
+ "۹": 56,
75
+ "“": 13
76
+ }