fabbrorada committed (verified) · Commit 4e6649c · Parent(s): 2374b12

Update config.json

Files changed (1): config.json (+1 -0)
config.json CHANGED
@@ -41,6 +41,7 @@
   "num_heads": 12,
   "num_layers": 12,
   "pad_token_id": 0,
+  "reg_token_id": 1,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "torch_dtype": "float32",
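
For context, a minimal sketch (not part of the commit) of how the updated file could be read back to confirm the added key, assuming a local copy of this repository's config.json; the file path and printed values are taken from the diff above.

import json

# Illustrative check: load the updated config.json from a local copy of the repo.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

print(config["pad_token_id"])   # 0, unchanged context line from the diff
print(config["reg_token_id"])   # 1, the key added by commit 4e6649c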