kibrq committed on
Commit d329cab · 1 Parent(s): 56bad2a

Update model

Files changed (2)
  1. config.json +2 -52
  2. configuration_greedy.py +11 -9
config.json CHANGED
@@ -8,58 +8,8 @@
   },
   "eos_token_id": 8,
   "pad_token_id": 9,
-  "reciprocals": [
-    [
-      4,
-      3
-    ],
-    [
-      5,
-      2
-    ],
-    [
-      6,
-      1
-    ]
-  ],
-  "reducables": [
-    [
-      [
-        4
-      ],
-      [
-        3
-      ]
-    ],
-    [
-      [
-        5
-      ],
-      [
-        2
-      ]
-    ],
-    [
-      [
-        6
-      ],
-      [
-        1
-      ]
-    ],
-    [
-      [
-        4,
-        5,
-        6
-      ],
-      [
-        1,
-        2,
-        3
-      ]
-    ]
-  ],
+  "reciprocals": null,
+  "reducables": null,
   "torch_dtype": "float32",
   "transformers_version": "4.21.1",
   "vocab_size": 10
configuration_greedy.py CHANGED
@@ -5,6 +5,16 @@ class GreedyConfig(PretrainedConfig):
 
     @classmethod
     def from_tokenizer(cls, freegroup_dimension, tokenizer: PreTrainedTokenizerBase, **kwargs):
+        config = cls(
+            vocab_size = len(tokenizer),
+            eos_token_id = tokenizer.eos_token_id,
+            pad_token_id = tokenizer.pad_token_id,
+            **kwargs
+        )
+        config._from_tokenizer(freegroup_dimension, tokenizer)
+        return config
+
+    def _from_tokenizer(self, freegroup_dimension, tokenizer):
 
         freegroup_generators = list(range(1, freegroup_dimension + 1))
 
@@ -17,15 +27,7 @@ class GreedyConfig(PretrainedConfig):
         for reducable, closure_generator in zip(reducables, [[x] for x in freegroup_generators] + [freegroup_generators[::]]):
             reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, closure_generator))))
             reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, tools.reciprocal(closure_generator)))))
-
-        return cls(
-            reciprocals = reciprocals,
-            reducables = reducables,
-            vocab_size = len(tokenizer),
-            eos_token_id = tokenizer.eos_token_id,
-            pad_token_id = tokenizer.pad_token_id,
-            **kwargs
-        )
+
 
     def __init__(self, **kwargs):
         # reciporcals: List[List[int]]: i.e. ['x', 'X'], ...
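After this refactor, from_tokenizer first builds the config with only the tokenizer-derived scalar fields (vocab_size, eos_token_id, pad_token_id) and then calls the new _from_tokenizer instance method, which derives reciprocals and reducables from the generator tokens. A hedged usage sketch; the tokenizer file name and the rank-3 free group are assumptions, only the from_tokenizer(freegroup_dimension, tokenizer) signature comes from the diff:

    from transformers import PreTrainedTokenizerFast
    from configuration_greedy import GreedyConfig

    # Hypothetical tokenizer load; any PreTrainedTokenizerBase whose vocabulary
    # contains the string forms of the generators and their inverses should work.
    tokenizer = PreTrainedTokenizerFast(tokenizer_file="tokenizer.json")

    # cls(...) fills the scalar fields, then _from_tokenizer() populates
    # reciprocals/reducables by converting generator tokens to ids.
    config = GreedyConfig.from_tokenizer(3, tokenizer)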