andyjzhao committed
Commit 76e09f1 · verified · 1 Parent(s): aff9928

Upload folder using huggingface_hub
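The commit message refers to a folder upload via huggingface_hub. A minimal sketch of how such a checkpoint folder is typically pushed (the exact call behind this commit is not shown; the repo_id below is a hypothetical placeholder and the token is assumed to come from a cached login or the HF_TOKEN environment variable):

    from huggingface_hub import HfApi

    api = HfApi()  # picks up a cached login or HF_TOKEN

    # Hypothetical target repo; substitute the repo that hosts this commit.
    api.upload_folder(
        folder_path="output/gencode_human_12.8k_12800/CKPT_DEBUG",
        repo_id="andyjzhao/your-repo-name",
        repo_type="model",
        commit_message="Upload folder using huggingface_hub",
    )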

checkpoint-20/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:81b058994c7bb01c1974d22357374a9fcaa0183d790223af50af90811785860c
+oid sha256:0deb9077d610bc6c6c3e247c7af784e7ca623d5cc46ce437b0fb27bdad27191e
 size 558664971
checkpoint-20/pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:816ae2edf36355d926b630d48a4a45b67f1986fecc140c4dbd67b92e8acb7e68
+oid sha256:e5fb880ce8ce6d6a3efccf929c14283e1399abfde74c8ba9c9a5cbad8426d40c
 size 279336283
checkpoint-20/trainer_state.json CHANGED
@@ -1,6 +1,6 @@
 {
 "best_global_step": 20,
-"best_metric": 148816.18235103838,
+"best_metric": 68320.79411816206,
 "best_model_checkpoint": "/network/scratch/j/jianan.zhao/DNAFM/output/gencode_human_12.8k_12800/CKPT_DEBUG/checkpoint-20",
 "epoch": 0.0007091444172605751,
 "eval_steps": 10,
@@ -13,21 +13,21 @@
 "comp/rl_weight": 0.03,
 "comp/strictness": 0.0,
 "epoch": 0.00035457220863028757,
-"grad_norm": 1111.61279296875,
-"loss": 146.1968,
-"loss_ce": 54.305824279785156,
-"loss_region": 0.10640381276607513,
-"loss_total": 54.412227630615234,
+"grad_norm": 1132.7886962890625,
+"loss": 146.4728,
+"loss_ce": 54.09164047241211,
+"loss_region": 0.10727831721305847,
+"loss_total": 54.198917388916016,
 "lr": 2.20454076850486e-05,
-"router/selected_tokens_s0": 21.6875,
+"router/selected_tokens_s0": 23.78125,
 "router/selected_tokens_s1": 1.0,
 "step": 10,
 "tokens_trained": 0.00409568
 },
 {
 "epoch": 0.00035457220863028757,
-"eval_ppl": 7.424721446549467e+17,
-"eval_runtime": 7.6118,
+"eval_ppl": 1.0186400757990682e+18,
+"eval_runtime": 7.2215,
 "step": 10,
 "tokens_trained": 0.00409568
 },
@@ -41,14 +41,14 @@
 "eval_F_nig": 8.365496566708533e-05,
 "eval_F_promoter": 4.770454407466195e-05,
 "eval_F_utr": 7.9026626119178e-05,
-"eval_G": 0.00015868149956862495,
-"eval_G_cds": 0.00015302779857089767,
-"eval_G_dig": 0.00013616989934109436,
-"eval_G_exon": 0.0001662856506549812,
-"eval_G_intron": 0.00016059590085930095,
-"eval_G_nig": 0.00015651285421575188,
-"eval_G_promoter": 0.00015211858489077828,
-"eval_G_utr": 0.00018231460938894855,
+"eval_G": 0.00017417084202725643,
+"eval_G_cds": 0.0001794316059741855,
+"eval_G_dig": 0.0001354323335807485,
+"eval_G_exon": 0.0001921070810539243,
+"eval_G_intron": 0.00017461964504566645,
+"eval_G_nig": 0.00016813731929190464,
+"eval_G_promoter": 0.00017862840321777992,
+"eval_G_utr": 0.0001872366236459373,
 "eval_avg_bp_per_token": 12799.0,
 "eval_bp_per_token/cds": 21077.722222222223,
 "eval_bp_per_token/dig": 12799.0,
@@ -57,13 +57,13 @@
 "eval_bp_per_token/nig": 11953.86301369863,
 "eval_bp_per_token/promoter": 20962.363636363636,
 "eval_bp_per_token/utr": 12653.962962962964,
-"eval_ppl_cds": 2.4835679876749204e+20,
-"eval_ppl_dig": 9.028849885879438e+16,
-"eval_ppl_exon": 2.2687957801872694e+18,
-"eval_ppl_intron": 2.8188580486001277e+17,
-"eval_ppl_nig": 1.1219396355209006e+17,
-"eval_ppl_promoter": 1.6162428794991283e+20,
-"eval_ppl_utr": 4.3084586855038605e+17,
+"eval_ppl_cds": 3.3014647482544744e+20,
+"eval_ppl_dig": 1.1622268373935285e+17,
+"eval_ppl_exon": 3.2522437808130043e+18,
+"eval_ppl_intron": 3.9138014555579206e+17,
+"eval_ppl_nig": 1.5178663639690314e+17,
+"eval_ppl_promoter": 2.1860490407785362e+20,
+"eval_ppl_utr": 5.7390657388765485e+17,
 "step": 10,
 "tokens_trained": 0.00409568
 },
@@ -71,11 +71,11 @@
 "comp/rl_weight": 0.03,
 "comp/strictness": 0.0,
 "epoch": 0.0007091444172605751,
-"grad_norm": 330.2630310058594,
-"loss": 25.8642,
-"loss_ce": 12.18574047088623,
-"loss_region": 0.09936444461345673,
-"loss_total": 12.285104751586914,
+"grad_norm": 303.0186462402344,
+"loss": 25.9086,
+"loss_ce": 12.05529499053955,
+"loss_region": 0.0993824377655983,
+"loss_total": 12.154677391052246,
 "lr": 4.654030511288038e-05,
 "router/selected_tokens_s0": 1.0,
 "router/selected_tokens_s1": 1.0,
@@ -84,8 +84,8 @@
 },
 {
 "epoch": 0.0007091444172605751,
-"eval_ppl": 148816.18235103838,
-"eval_runtime": 7.2278,
+"eval_ppl": 68320.79411816206,
+"eval_runtime": 7.246,
 "step": 20,
 "tokens_trained": 0.00819136
 },
@@ -115,13 +115,13 @@
 "eval_bp_per_token/nig": 11953.86301369863,
 "eval_bp_per_token/promoter": 20962.363636363636,
 "eval_bp_per_token/utr": 12653.962962962964,
-"eval_ppl_cds": 395673.42673693807,
-"eval_ppl_dig": 31301.43244868628,
-"eval_ppl_exon": 140758.6210649053,
-"eval_ppl_intron": 122319.47964181146,
-"eval_ppl_nig": 111897.23080114595,
-"eval_ppl_promoter": 421597.51324185927,
-"eval_ppl_utr": 202983.65571501417,
+"eval_ppl_cds": 154033.63476438122,
+"eval_ppl_dig": 15444.189120194795,
+"eval_ppl_exon": 60520.166471376135,
+"eval_ppl_intron": 57146.884551315,
+"eval_ppl_nig": 54740.4715093694,
+"eval_ppl_promoter": 169717.99548180104,
+"eval_ppl_utr": 97889.76239710047,
 "step": 20,
 "tokens_trained": 0.00819136
 }
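The updated metrics above (best_metric, eval_ppl, per-region perplexities) can be read back from the checkpoint itself. A small sketch, assuming the standard Hugging Face Trainer layout where per-step logs live under a "log_history" key (that key is not visible in the diff, so treat it as an assumption):

    import json

    with open("checkpoint-20/trainer_state.json") as f:
        state = json.load(f)

    print("best_metric:", state["best_metric"])           # 68320.79411816206 after this commit
    print("best checkpoint:", state["best_model_checkpoint"])

    # Assumed standard Trainer field holding the per-step log entries.
    for entry in state.get("log_history", []):
        if "eval_ppl" in entry:
            print(entry["step"], entry["eval_ppl"])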
checkpoint-20/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:09a783c7f9bdabaac56f2ebff5b9206706ee40f8914b7eb767a104570a9abfac
+oid sha256:27e50a78a0ad195455e56f8e4877f004f4700345080b847afe4f3e05a3937021
 size 5969
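The binary files in this commit are stored as Git LFS pointers: "oid sha256:..." is the SHA-256 of the actual blob and "size" is its byte count. A quick sketch for checking a downloaded file against its pointer, using the optimizer.pt hash from this commit (path is assumed to be the local checkpoint layout):

    import hashlib

    expected = "0deb9077d610bc6c6c3e247c7af784e7ca623d5cc46ce437b0fb27bdad27191e"

    h = hashlib.sha256()
    with open("checkpoint-20/optimizer.pt", "rb") as f:
        # Hash in chunks so large checkpoints do not need to fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    assert h.hexdigest() == expected, "downloaded file does not match the LFS pointer"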