Moleys committed
Commit 1b90fae · verified · 1 parent: aee2cea

Upload model

Files changed (3)
  1. config.json +56 -56
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,56 +1,56 @@
-{
-  "_name_or_path": "zizi",
-  "activation_dropout": 0.0,
-  "activation_function": "swish",
-  "add_bias_logits": false,
-  "add_final_layer_norm": false,
-  "architectures": [
-    "MarianMTModel"
-  ],
-  "attention_dropout": 0.0,
-  "bos_token_id": 0,
-  "classif_dropout": 0.0,
-  "classifier_dropout": 0.0,
-  "d_model": 256,
-  "decoder_attention_heads": 4,
-  "decoder_ffn_dim": 1024,
-  "decoder_layerdrop": 0.0,
-  "decoder_layers": 4,
-  "decoder_start_token_id": 29066,
-  "decoder_vocab_size": 29067,
-  "dropout": 0.1,
-  "encoder_attention_heads": 4,
-  "encoder_ffn_dim": 1024,
-  "encoder_layerdrop": 0.0,
-  "encoder_layers": 4,
-  "eos_token_id": 0,
-  "extra_pos_embeddings": 29067,
-  "forced_eos_token_id": 0,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2"
-  },
-  "init_std": 0.02,
-  "is_encoder_decoder": true,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_2": 2
-  },
-  "max_length": null,
-  "max_position_embeddings": 512,
-  "model_type": "marian",
-  "normalize_before": false,
-  "normalize_embedding": false,
-  "num_beams": null,
-  "num_hidden_layers": 4,
-  "pad_token_id": 29066,
-  "scale_embedding": true,
-  "share_encoder_decoder_embeddings": true,
-  "static_position_embeddings": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.46.3",
-  "use_cache": true,
-  "vocab_size": 29067
-}
+{
+  "_name_or_path": "Moleys/yiyi",
+  "activation_dropout": 0.0,
+  "activation_function": "swish",
+  "add_bias_logits": false,
+  "add_final_layer_norm": false,
+  "architectures": [
+    "MarianMTModel"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 0,
+  "classif_dropout": 0.0,
+  "classifier_dropout": 0.0,
+  "d_model": 256,
+  "decoder_attention_heads": 4,
+  "decoder_ffn_dim": 1024,
+  "decoder_layerdrop": 0.0,
+  "decoder_layers": 4,
+  "decoder_start_token_id": 29066,
+  "decoder_vocab_size": 29067,
+  "dropout": 0.1,
+  "encoder_attention_heads": 4,
+  "encoder_ffn_dim": 1024,
+  "encoder_layerdrop": 0.0,
+  "encoder_layers": 4,
+  "eos_token_id": 0,
+  "extra_pos_embeddings": 29067,
+  "forced_eos_token_id": 0,
+  "id2label": {
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2"
+  },
+  "init_std": 0.02,
+  "is_encoder_decoder": true,
+  "label2id": {
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2
+  },
+  "max_length": null,
+  "max_position_embeddings": 512,
+  "model_type": "marian",
+  "normalize_before": false,
+  "normalize_embedding": false,
+  "num_beams": null,
+  "num_hidden_layers": 4,
+  "pad_token_id": 29066,
+  "scale_embedding": true,
+  "share_encoder_decoder_embeddings": true,
+  "static_position_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.3",
+  "use_cache": true,
+  "vocab_size": 29067
+}
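The only substantive changes in config.json are the `_name_or_path` and `transformers_version` fields; the architecture fields are untouched. For reference, a minimal sketch of loading this revision with transformers and checking a few of those fields. The repo id "Moleys/yiyi" is taken from the new `_name_or_path` value and is an assumption, not confirmed by this commit.

```python
from transformers import MarianConfig, MarianMTModel

# Repo id taken from "_name_or_path" in the new config -- assumed, not verified.
repo_id = "Moleys/yiyi"

# Pin to this commit so the updated config and weights are what gets loaded.
config = MarianConfig.from_pretrained(repo_id, revision="1b90fae")
print(config.d_model)          # 256
print(config.encoder_layers)   # 4
print(config.vocab_size)       # 29067

model = MarianMTModel.from_pretrained(repo_id, revision="1b90fae")
```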
generation_config.json CHANGED
@@ -12,5 +12,5 @@
   "max_length": 512,
   "num_beams": 6,
   "pad_token_id": 29066,
-  "transformers_version": "4.47.1"
+  "transformers_version": "4.48.3"
 }
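Only the `transformers_version` stamp changes here; the generation defaults (max_length=512, num_beams=6, pad_token_id=29066) stay the same. A minimal sketch of running generation with those defaults, where the repo id and input sentence are placeholders rather than values from this commit:

```python
from transformers import MarianMTModel, MarianTokenizer

repo_id = "Moleys/yiyi"  # assumed repo id, see note above
tokenizer = MarianTokenizer.from_pretrained(repo_id)
model = MarianMTModel.from_pretrained(repo_id)

# Placeholder input; generate() picks up num_beams=6 and max_length=512
# from the model's generation_config by default.
inputs = tokenizer("example source sentence", return_tensors="pt")
output_ids = model.generate(**inputs)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```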
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1346ebba926634b1580769868ee192fcd25193b7dbbcbd9072cc6b23bd01e338
+oid sha256:d6f9d3448a7947b8ff2c8ed96600951eab32b9ff3d7d2ca5681a3737a8a7c543
 size 59391260
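Because model.safetensors is stored via Git LFS, this pointer only records the new weight file's sha256 and size. A minimal sketch of verifying a downloaded copy against those values; the local path is an assumption:

```python
import hashlib
import os

# Values copied from the Git LFS pointer in this commit.
expected_sha256 = "d6f9d3448a7947b8ff2c8ed96600951eab32b9ff3d7d2ca5681a3737a8a7c543"
expected_size = 59391260

path = "model.safetensors"  # assumed local path to the downloaded file

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_sha256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```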