robertauny committed
Commit 769c0fa · verified · 1 Parent(s): d83f861

Upload folder using huggingface_hub

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "valhalla/distilt5-qg-hl-6-4",
+  "_name_or_path": "t5-small",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
@@ -12,30 +12,13 @@
   "dropout_rate": 0.1,
   "eos_token_id": 1,
   "feed_forward_proj": "relu",
-  "init_metadata": {
-    "copied_decoder_layers": [
-      0,
-      1,
-      3,
-      5
-    ],
-    "copied_encoder_layers": [
-      0,
-      1,
-      2,
-      3,
-      4,
-      5
-    ],
-    "teacher_type": "t5"
-  },
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
   "is_gated_act": false,
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
-  "num_decoder_layers": 4,
+  "num_decoder_layers": 6,
   "num_heads": 8,
   "num_layers": 6,
   "output_past": true,
@@ -43,16 +26,36 @@
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "task_specific_params": {
+    "summarization": {
+      "early_stopping": true,
+      "length_penalty": 2.0,
+      "max_length": 200,
+      "min_length": 30,
+      "no_repeat_ngram_size": 3,
+      "num_beams": 4,
+      "prefix": "summarize: "
+    },
+    "translation_en_to_de": {
+      "early_stopping": true,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to German: "
+    },
     "translation_en_to_fr": {
       "early_stopping": true,
-      "length_penalty": 1.0,
-      "max_length": 32,
+      "max_length": 300,
+      "num_beams": 4,
+      "prefix": "translate English to French: "
+    },
+    "translation_en_to_ro": {
+      "early_stopping": true,
+      "max_length": 300,
       "num_beams": 4,
-      "prefix": ""
+      "prefix": "translate English to Romanian: "
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.45.2",
+  "transformers_version": "4.46.3",
   "use_cache": true,
-  "vocab_size": 32102
+  "vocab_size": 32128
 }
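
Note: the updated config now points at t5-small (full 6-layer decoder, vocab_size 32128) and carries the stock T5 task_specific_params, whose "prefix" strings are what T5 expects prepended to the input text. A minimal sketch of using the summarization settings from this config is below; REPO_ID is a hypothetical placeholder, since the commit view does not show the repository id.

# Minimal sketch, assuming REPO_ID is this model repo's "user/repo" id
# (not shown in this commit view; substitute the real one).
from transformers import AutoConfig, AutoTokenizer, T5ForConditionalGeneration

REPO_ID = "user/repo"  # hypothetical placeholder

config = AutoConfig.from_pretrained(REPO_ID)
print(config.num_decoder_layers)  # 6 after this commit (was 4)

tokenizer = AutoTokenizer.from_pretrained(REPO_ID)
model = T5ForConditionalGeneration.from_pretrained(REPO_ID)

# T5 is prompted with a task prefix; these values mirror t5-small's defaults.
params = config.task_specific_params["summarization"]
text = params["prefix"] + "The tower is 324 metres tall, about the same height as an 81-storey building."
inputs = tokenizer(text, return_tensors="pt", truncation=True)
out = model.generate(
    **inputs,
    max_length=params["max_length"],
    min_length=params["min_length"],
    num_beams=params["num_beams"],
    length_penalty=params["length_penalty"],
    no_repeat_ngram_size=params["no_repeat_ngram_size"],
    early_stopping=params["early_stopping"],
)
print(tokenizer.decode(out[0], skip_special_tokens=True))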
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "decoder_start_token_id": 0,
   "eos_token_id": 1,
   "pad_token_id": 0,
-  "transformers_version": "4.45.2"
+  "transformers_version": "4.46.3"
 }
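
Note: only the transformers_version stamp changes here, matching the 4.46.3 bump in config.json; the decoder start, eos, and pad token ids are unchanged. A small sketch of reading these values (same hypothetical placeholder repo id as above):

# Sketch: inspect the generation config written by this commit.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("user/repo")  # placeholder id
print(gen_config.decoder_start_token_id)  # 0
print(gen_config.eos_token_id)            # 1
print(gen_config.pad_token_id)            # 0
print(gen_config.transformers_version)    # "4.46.3" after this commit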
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0b0125e44c8d9c9b57df56f2990cbaa5e9f71d99508e06cda2635dcc944320b5
-size 208418832
+oid sha256:881ecd3d2e20cfb1ddf96ac598ad90a610007dfa4ff04cfc024e4afd118bc686
+size 242041896
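
Note: the LFS pointer shows the checkpoint growing from ~208 MB to ~242 MB. At 4 bytes per float32 parameter (ignoring the small safetensors header), that is consistent with moving from the 6-encoder/4-decoder distilled model to t5-small's ~60M parameters:

# Back-of-the-envelope check on the checkpoint sizes (float32 = 4 bytes/param).
old_bytes, new_bytes = 208_418_832, 242_041_896
print(old_bytes / 4 / 1e6)  # ~52.1M params (old config: 6 encoder, 4 decoder layers)
print(new_bytes / 4 / 1e6)  # ~60.5M params, in line with t5-small (6 + 6 layers)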
special_tokens_map.json CHANGED
@@ -101,7 +101,25 @@
     "<extra_id_98>",
     "<extra_id_99>"
   ],
-  "eos_token": "</s>",
-  "pad_token": "<pad>",
-  "unk_token": "<unk>"
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
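
Note: the special tokens themselves are unchanged; they are now serialized as full token objects (content plus lstrip/rstrip/normalized/single_word flags) instead of bare strings, the form that corresponds to AddedToken entries. A hedged sketch of the equivalence (placeholder repo id again):

# Sketch: the expanded JSON entries correspond to AddedToken objects; a bare
# string and an explicit AddedToken resolve to the same special token.
from tokenizers import AddedToken
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/repo")  # hypothetical placeholder id
print(tok.eos_token, tok.pad_token, tok.unk_token)  # </s> <pad> <unk>

# Explicit form of the "eos_token" entry in special_tokens_map.json:
eos = AddedToken("</s>", lstrip=False, rstrip=False, normalized=False, single_word=False)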
tokenizer_config.json CHANGED
@@ -927,10 +927,10 @@
     "<extra_id_98>",
     "<extra_id_99>"
   ],
-  "clean_up_tokenization_spaces": false,
+  "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 512,
   "pad_token": "<pad>",
   "tokenizer_class": "T5Tokenizer",
   "unk_token": "<unk>"