IvanHU committed · Commit 6423ff6 · 1 Parent(s): b4f1bfd
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+global_step194526_universal/* filter=lfs diff=lfs merge=lfs -text
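The added rule routes everything directly under `global_step194526_universal/` (by its name, a DeepSpeed universal-checkpoint directory) through Git LFS rather than regular git objects. A rough sketch of the matching with a hypothetical file name; note that `fnmatch` only approximates gitattributes' wildmatch semantics (in real gitattributes, `*` does not cross `/`):

```python
from fnmatch import fnmatch

pattern = "global_step194526_universal/*"
paths = [
    # Hypothetical file name, for illustration only.
    "global_step194526_universal/zero_pp_rank_0_mp_rank_00_model_states.pt",
    "model.safetensors",
]
for p in paths:
    # Caveat: fnmatch's "*" matches "/" as well, unlike gitattributes' wildmatch.
    print(p, "-> LFS" if fnmatch(p, pattern) else "-> regular git")
```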
config.json ADDED
@@ -0,0 +1,65 @@
+{
+  "_name_or_path": "/fs/archive/share/yulan/data/aa_mini/output/miniyulan-2B-final-stage19-hyw-2/checkpoint-184795-rms_norm",
+  "architectures": [
+    "MiniYuLanModelForCausalLM"
+  ],
+  "attention_bias": true,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "dim_model_base": 1920,
+  "dim_model_base_attn": 64,
+  "dim_model_base_init": null,
+  "dim_model_base_lmh": 1,
+  "dim_model_base_logits": 1920.0,
+  "dim_model_base_lr": 256.0,
+  "down_proj_alpha": 0.03450327796711771,
+  "embed_tokens_alpha": 1,
+  "embedding_ln": false,
+  "embedding_rmsln": false,
+  "eos_token_id": 2,
+  "gate_up_proj_alpha": 0.3651483716701107,
+  "gradient_checkpointing_step": 11,
+  "hidden_act": "silu",
+  "hidden_size": 1920,
+  "hidden_states_shrink": 0.18708286933869706,
+  "init_scale_o": 1,
+  "initializer_range": 5e-05,
+  "input_layernorm_alpha": 1.0,
+  "intermediate_size": 4800,
+  "k_proj_alpha": 0.3651483716701107,
+  "layer_norm_eps": 1e-06,
+  "lm_head_alpha": 1.0,
+  "ln_scale": 1,
+  "max_position_embeddings": 4096,
+  "model_reproduce": "transformer",
+  "model_type": "miniyulan",
+  "norm_alpha": 1.0,
+  "num_attention_heads": 30,
+  "num_epochs_trained_before_this_epoch": 19,
+  "num_hidden_layers": 56,
+  "num_key_value_heads": 6,
+  "num_steps_trained_before_this_epoch": 184795,
+  "o_proj_alpha": 0.03450327796711771,
+  "post_attention_layernorm_alpha": 1.0,
+  "q_proj_alpha": 0.3651483716701107,
+  "qk_layernorm": false,
+  "rms_norm_eps": 1e-06,
+  "rms_type": "llama",
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "scale_emb": 10.0,
+  "shrink_alpha": 1,
+  "sliding_window": null,
+  "tie_word_embeddings": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.0",
+  "use_cache": false,
+  "use_emb_alpha": true,
+  "use_liger": true,
+  "use_norm_alpha": true,
+  "use_sliding_window": false,
+  "v_proj_alpha": 0.3651483716701107,
+  "vocab_size": 99000,
+  "wesar_weights": true,
+  "z_loss": 0.0001
+}
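The cluster of `*_alpha` and shrink constants reads like a µP-style reparameterization (the `wesar_weights` flag points at WeSaR-style weight scaling). The closed forms below are my inference from the numbers alone — they reproduce the config values exactly, but nothing in this commit documents them:

```python
import math

hidden_size = 1920
num_hidden_layers = 56
dim_model_base_lr = 256.0

# Inferred: q/k/v/gate_up alphas scale as sqrt(dim_model_base_lr / hidden_size).
qkv_alpha = math.sqrt(dim_model_base_lr / hidden_size)
print(qkv_alpha)   # ~0.36514837167, matches "q_proj_alpha"

# Inferred: o_proj/down_proj additionally divide by sqrt(2 * num_layers).
o_alpha = qkv_alpha / math.sqrt(2 * num_hidden_layers)
print(o_alpha)     # ~0.03450327797, matches "o_proj_alpha"

# Inferred: MiniCPM-style depth scaling, 1.4 / sqrt(num_layers).
shrink = 1.4 / math.sqrt(num_hidden_layers)
print(shrink)      # ~0.18708286934, matches "hidden_states_shrink"
```

Since `model_type` is the custom `miniyulan`, loading this checkpoint with `AutoModelForCausalLM.from_pretrained` presumably requires the repo's own modeling code (`trust_remote_code=True` or a local import); the stock transformers 4.44.0 recorded here does not ship a `miniyulan` architecture.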
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0385d81e1abc139255726b6ffed93d21a678bd776423c82db068dd9752c23635
+size 4848661852
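What the commit stores is a Git LFS pointer, not the weights themselves: the ~4.8 GB safetensors blob lives in LFS storage, identified by the sha256 above. A minimal integrity check after download, streamed so the file never sits fully in memory:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash a file in 1 MiB chunks."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "0385d81e1abc139255726b6ffed93d21a678bd776423c82db068dd9752c23635"
assert sha256_of("model.safetensors") == expected, "download is corrupt or incomplete"
```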
special_tokens_map.json ADDED
@@ -0,0 +1,60 @@
+{
+  "additional_special_tokens": [
+    {
+      "content": "<reasoning_step>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|start_header_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|end_header_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|eot_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    }
+  ],
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
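A quick way to confirm the special tokens survived as atomic tokens rather than being split by the underlying SentencePiece model — the local path is an assumption (run inside a checkout of this repo):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")  # assumption: a local clone of this repo

for t in ["<reasoning_step>", "<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>"]:
    ids = tok.encode(t, add_special_tokens=False)
    print(t, "->", ids)
    assert len(ids) == 1, f"{t} was split into multiple tokens"
```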
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,82 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "102": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "103": {
+      "content": "<reasoning_step>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "104": {
+      "content": "<|start_header_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "105": {
+      "content": "<|end_header_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "106": {
+      "content": "<|eot_id|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false,
+  "chat_template": "{% if messages[0]['role'] == 'system' %}\n {% set offset = 1 %}\n{% else %}\n {% set offset = 0 %}\n{% endif %}\n\n{{ bos_token }}\n{% for message in messages %}\n {% if (message['role'] == 'user') != (loop.index0 % 2 == offset) %}\n {{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}\n {% endif %}\n\n {{ '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n' + message['content'] | trim + '<|eot_id|>' }}\n{% endfor %}\n\n{% if add_generation_prompt %}\n {{ '<|start_header_id|>' + 'assistant' + '<|end_header_id|>\n\n' }}\n{% endif %}"
+}
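The embedded `chat_template` renders Llama-3-style headers (`<|start_header_id|>role<|end_header_id|>` … `<|eot_id|>`) and raises if roles fail to alternate user/assistant. A sketch of what it produces, again assuming a local checkout:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./")  # assumption: a local clone of this repo

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4."},
    {"role": "user", "content": "Now double it."},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)  # one header block per turn, ending with an open assistant header
```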
trainer_state.json ADDED
@@ -0,0 +1,33 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 0.0,
+  "eval_steps": 500,
+  "global_step": 0,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+  ],
+  "logging_steps": 3,
+  "max_steps": 0,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 0,
+  "save_steps": "NEED CHANGE",
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 0,
+  "train_batch_size": "NEED CHANGE",
+  "trial_name": null,
+  "trial_params": null
+}
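Note that `save_steps` and `train_batch_size` are literal "NEED CHANGE" placeholders, so this state file cannot drive a `Trainer` resume as-is, and the intended values are not recoverable from this commit. A small pre-flight check that only detects the placeholders without guessing values:

```python
import json

with open("trainer_state.json") as f:
    state = json.load(f)

todo = [k for k, v in state.items() if v == "NEED CHANGE"]
if todo:
    raise SystemExit(f"fill these fields before resuming training: {todo}")
```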
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b091a9c9025c7790c7a991a1394f67c8da76d9287da770d886b4a698f5acc51c
+size 10872
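`training_args.bin` is conventionally a pickled `transformers.TrainingArguments`. A load sketch: `weights_only=False` is required on recent PyTorch, and since unpickling executes code, only do this for files you trust:

```python
import torch

# Requires transformers to be importable so the pickled class can be resolved.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__, getattr(args, "learning_rate", None))
```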