kibrq committed on
Commit
623e9da
·
1 Parent(s): 8f70f91

Update model

Browse files
Files changed (2) hide show
  1. config.json +66 -2
  2. configuration_greedy.py +4 -4
config.json CHANGED
@@ -8,8 +8,72 @@
8
  },
9
  "eos_token_id": 8,
10
  "pad_token_id": 9,
11
- "reciprocals": null,
12
- "reducables": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  "torch_dtype": "float32",
14
  "transformers_version": "4.21.1",
15
  "vocab_size": 10
 
8
  },
9
  "eos_token_id": 8,
10
  "pad_token_id": 9,
11
+ "reciprocals": [
12
+ [
13
+ 4,
14
+ 3
15
+ ],
16
+ [
17
+ 5,
18
+ 2
19
+ ],
20
+ [
21
+ 6,
22
+ 1
23
+ ],
24
+ [
25
+ 7,
26
+ 0
27
+ ]
28
+ ],
29
+ "reducables": [
30
+ [
31
+ [
32
+ 4
33
+ ],
34
+ [
35
+ 3
36
+ ]
37
+ ],
38
+ [
39
+ [
40
+ 5
41
+ ],
42
+ [
43
+ 2
44
+ ]
45
+ ],
46
+ [
47
+ [
48
+ 6
49
+ ],
50
+ [
51
+ 1
52
+ ]
53
+ ],
54
+ [
55
+ [
56
+ 7
57
+ ],
58
+ [
59
+ 0
60
+ ]
61
+ ],
62
+ [
63
+ [
64
+ 4,
65
+ 5,
66
+ 6,
67
+ 7
68
+ ],
69
+ [
70
+ 0,
71
+ 1,
72
+ 2,
73
+ 3
74
+ ]
75
+ ]
76
+ ],
77
  "torch_dtype": "float32",
78
  "transformers_version": "4.21.1",
79
  "vocab_size": 10
configuration_greedy.py CHANGED
@@ -18,13 +18,13 @@ class GreedyConfig(PretrainedConfig):
18
 
19
  freegroup_generators = list(range(1, freegroup_dimension + 1))
20
 
21
- reciprocals = []
22
  for x in freegroup_generators:
23
  a, b = tokenizer.convert_tokens_to_ids([str(x), str(-x)])
24
- reciprocals.append([a, b])
25
 
26
- reducables = [[] for _ in range(freegroup_dimension + 1)]
27
- for reducable, closure_generator in zip(reducables, [[x] for x in freegroup_generators] + [freegroup_generators[::]]):
28
  reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, closure_generator))))
29
  reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, tools.reciprocal(closure_generator)))))
30
 
 
18
 
19
  freegroup_generators = list(range(1, freegroup_dimension + 1))
20
 
21
+ self.reciprocals = []
22
  for x in freegroup_generators:
23
  a, b = tokenizer.convert_tokens_to_ids([str(x), str(-x)])
24
+ self.reciprocals.append([a, b])
25
 
26
+ self.reducables = [[] for _ in range(freegroup_dimension + 1)]
27
+ for reducable, closure_generator in zip(self.reducables, [[x] for x in freegroup_generators] + [freegroup_generators[::]]):
28
  reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, closure_generator))))
29
  reducable.append(tokenizer.convert_tokens_to_ids(list(map(str, tools.reciprocal(closure_generator)))))
30