yimingzhang committed
Commit 5826fc1 · 1 Parent(s): 1c49548

Model save

README.md CHANGED
@@ -15,15 +15,15 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [alignment-handbook/zephyr-7b-sft-full](https://huggingface.co/alignment-handbook/zephyr-7b-sft-full) on the None dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.4134
- - Rewards/chosen: -1.1629
- - Rewards/rejected: -10.3565
- - Rewards/accuracies: 0.8680
- - Rewards/margins: 9.1936
- - Logps/rejected: -329.2987
- - Logps/chosen: -259.4434
- - Logits/rejected: -2.7296
- - Logits/chosen: -2.6963
+ - Loss: 0.6578
+ - Rewards/chosen: -2.4796
+ - Rewards/rejected: -6.7098
+ - Rewards/accuracies: 0.7849
+ - Rewards/margins: 4.2302
+ - Logps/rejected: -336.6310
+ - Logps/chosen: -339.6573
+ - Logits/rejected: -2.7799
+ - Logits/chosen: -2.7857
 
 ## Model description
 
@@ -59,64 +59,67 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss | Rewards/chosen | Rewards/rejected | Rewards/accuracies | Rewards/margins | Logps/rejected | Logps/chosen | Logits/rejected | Logits/chosen |
 |:-------------:|:-----:|:----:|:---------------:|:--------------:|:----------------:|:------------------:|:---------------:|:--------------:|:------------:|:---------------:|:-------------:|
- | 0.5849 | 0.05 | 100 | 0.5763 | 0.4134 | 0.0482 | 0.7320 | 0.3652 | -225.2518 | -243.6803 | -2.9389 | -2.9064 |
- | 0.3703 | 0.1 | 200 | 0.4118 | 0.2900 | -1.1319 | 0.7900 | 1.4219 | -237.0526 | -244.9145 | -2.9372 | -2.8993 |
- | 0.4041 | 0.15 | 300 | 0.4334 | 0.1380 | -1.8819 | 0.8180 | 2.0199 | -244.5529 | -246.4347 | -2.9551 | -2.9179 |
- | 0.3508 | 0.21 | 400 | 0.3957 | 0.2646 | -2.6072 | 0.8440 | 2.8718 | -251.8057 | -245.1687 | -2.9765 | -2.9422 |
- | 0.3785 | 0.26 | 500 | 0.3717 | 0.4445 | -2.6346 | 0.8420 | 3.0792 | -252.0801 | -243.3692 | -3.0745 | -3.0376 |
- | 0.4096 | 0.31 | 600 | 0.3610 | 0.5559 | -2.6855 | 0.8380 | 3.2414 | -252.5888 | -242.2559 | -3.0165 | -2.9719 |
- | 0.3551 | 0.36 | 700 | 0.3508 | 0.4284 | -3.0859 | 0.8520 | 3.5143 | -256.5924 | -243.5300 | -3.0461 | -3.0172 |
- | 0.3751 | 0.41 | 800 | 0.3683 | 0.4576 | -3.3895 | 0.8480 | 3.8472 | -259.6292 | -243.2381 | -2.9170 | -2.8920 |
- | 0.4334 | 0.46 | 900 | 0.3622 | 0.3515 | -3.5622 | 0.8420 | 3.9137 | -261.3556 | -244.2990 | -2.9299 | -2.8964 |
- | 0.4673 | 0.52 | 1000 | 0.3595 | 0.3676 | -4.0125 | 0.8720 | 4.3800 | -265.8586 | -244.1389 | -2.9297 | -2.8947 |
- | 0.363 | 0.57 | 1100 | 0.3266 | 0.5186 | -3.6224 | 0.8420 | 4.1410 | -261.9582 | -242.6288 | -2.8564 | -2.8256 |
- | 0.3675 | 0.62 | 1200 | 0.3256 | 0.5935 | -3.9886 | 0.8320 | 4.5821 | -265.6197 | -241.8796 | -2.8294 | -2.7960 |
- | 0.3265 | 0.67 | 1300 | 0.3339 | 0.7248 | -4.0507 | 0.8500 | 4.7755 | -266.2408 | -240.5668 | -2.9186 | -2.8869 |
- | 0.4276 | 0.72 | 1400 | 0.3391 | 0.5884 | -4.2450 | 0.8440 | 4.8334 | -268.1834 | -241.9301 | -2.9354 | -2.9044 |
- | 0.3512 | 0.77 | 1500 | 0.3597 | 0.8748 | -4.2015 | 0.8360 | 5.0763 | -267.7492 | -239.0665 | -2.9078 | -2.8697 |
- | 0.3429 | 0.83 | 1600 | 0.3304 | 0.5707 | -4.4323 | 0.8360 | 5.0030 | -270.0566 | -242.1075 | -2.9041 | -2.8661 |
- | 0.4142 | 0.88 | 1700 | 0.3241 | 0.6941 | -4.0424 | 0.8500 | 4.7365 | -266.1581 | -240.8735 | -2.8839 | -2.8458 |
- | 0.3281 | 0.93 | 1800 | 0.3316 | 0.6987 | -4.3778 | 0.8320 | 5.0765 | -269.5117 | -240.8274 | -2.8548 | -2.8151 |
- | 0.3652 | 0.98 | 1900 | 0.3273 | 0.7334 | -4.1642 | 0.8260 | 4.8976 | -267.3762 | -240.4808 | -2.8455 | -2.8051 |
- | 0.0329 | 1.03 | 2000 | 0.3243 | 0.7401 | -4.8320 | 0.8360 | 5.5721 | -274.0543 | -240.4135 | -2.8127 | -2.7749 |
- | 0.0473 | 1.08 | 2100 | 0.3209 | 0.5992 | -5.3071 | 0.8440 | 5.9063 | -278.8052 | -241.8228 | -2.8128 | -2.7757 |
- | 0.0519 | 1.14 | 2200 | 0.3360 | 0.4729 | -6.0694 | 0.8540 | 6.5423 | -286.4280 | -243.0853 | -2.8572 | -2.8167 |
- | 0.0637 | 1.19 | 2300 | 0.3173 | 0.4218 | -6.1038 | 0.8500 | 6.5256 | -286.7715 | -243.5961 | -2.8084 | -2.7744 |
- | 0.1132 | 1.24 | 2400 | 0.3619 | 0.5158 | -6.3683 | 0.8620 | 6.8840 | -289.4164 | -242.6569 | -2.7845 | -2.7428 |
- | 0.0455 | 1.29 | 2500 | 0.3457 | 0.3782 | -6.9569 | 0.8680 | 7.3351 | -295.3029 | -244.0325 | -2.8466 | -2.8059 |
- | 0.0506 | 1.34 | 2600 | 0.3638 | 0.5356 | -6.6879 | 0.8740 | 7.2234 | -292.6124 | -242.4586 | -2.8550 | -2.8132 |
- | 0.0561 | 1.39 | 2700 | 0.3429 | 0.0200 | -6.8955 | 0.8540 | 6.9154 | -294.6885 | -247.6150 | -2.7937 | -2.7605 |
- | 0.0744 | 1.45 | 2800 | 0.3600 | 0.4107 | -5.9150 | 0.8580 | 6.3256 | -284.8834 | -243.7078 | -2.8861 | -2.8541 |
- | 0.0542 | 1.5 | 2900 | 0.3590 | 0.4739 | -6.5851 | 0.8620 | 7.0590 | -291.5851 | -243.0756 | -2.7995 | -2.7685 |
- | 0.0534 | 1.55 | 3000 | 0.3317 | 0.2732 | -6.5533 | 0.8640 | 6.8265 | -291.2669 | -245.0823 | -2.7514 | -2.7185 |
- | 0.0552 | 1.6 | 3100 | 0.3435 | 0.2221 | -6.8910 | 0.8680 | 7.1130 | -294.6435 | -245.5938 | -2.8895 | -2.8457 |
- | 0.0561 | 1.65 | 3200 | 0.3249 | 0.2894 | -6.7482 | 0.8620 | 7.0376 | -293.2156 | -244.9207 | -2.8713 | -2.8288 |
- | 0.0898 | 1.7 | 3300 | 0.3395 | 0.0255 | -7.2638 | 0.8600 | 7.2893 | -298.3713 | -247.5592 | -2.8631 | -2.8248 |
- | 0.038 | 1.76 | 3400 | 0.3603 | -0.1362 | -7.7326 | 0.8620 | 7.5964 | -303.0600 | -249.1768 | -2.8984 | -2.8586 |
- | 0.0369 | 1.81 | 3500 | 0.3380 | 0.1519 | -7.2043 | 0.8640 | 7.3562 | -297.7772 | -246.2957 | -2.7709 | -2.7392 |
- | 0.05 | 1.86 | 3600 | 0.3445 | 0.0541 | -7.4105 | 0.8660 | 7.4646 | -299.8390 | -247.2734 | -2.8671 | -2.8318 |
- | 0.0576 | 1.91 | 3700 | 0.3461 | 0.0272 | -7.3255 | 0.8720 | 7.3527 | -298.9885 | -247.5422 | -2.8397 | -2.8001 |
- | 0.0632 | 1.96 | 3800 | 0.3487 | -0.1259 | -7.5962 | 0.8660 | 7.4703 | -301.6957 | -249.0733 | -2.7688 | -2.7323 |
- | 0.016 | 2.01 | 3900 | 0.3600 | -0.0502 | -7.6061 | 0.8720 | 7.5559 | -301.7952 | -248.3167 | -2.7419 | -2.7075 |
- | 0.0272 | 2.07 | 4000 | 0.3654 | -0.7856 | -8.5882 | 0.8700 | 7.8026 | -311.6161 | -255.6703 | -2.7034 | -2.6760 |
- | 0.0062 | 2.12 | 4100 | 0.3840 | -0.8021 | -9.2416 | 0.8680 | 8.4395 | -318.1496 | -255.8355 | -2.6878 | -2.6539 |
- | 0.0132 | 2.17 | 4200 | 0.3860 | -0.6756 | -8.9420 | 0.8660 | 8.2663 | -315.1535 | -254.5710 | -2.6362 | -2.6286 |
- | 0.0073 | 2.22 | 4300 | 0.4065 | -0.6866 | -9.3464 | 0.8700 | 8.6599 | -319.1981 | -254.6802 | -2.6570 | -2.6447 |
- | 0.0061 | 2.27 | 4400 | 0.3912 | -0.3556 | -9.0298 | 0.8620 | 8.6742 | -316.0318 | -251.3709 | -2.6600 | -2.6548 |
- | 0.0159 | 2.32 | 4500 | 0.3864 | -0.3204 | -8.6909 | 0.8620 | 8.3705 | -312.6425 | -251.0182 | -2.6958 | -2.6766 |
- | 0.0065 | 2.37 | 4600 | 0.4118 | -0.8305 | -9.4630 | 0.8580 | 8.6325 | -320.3637 | -256.1196 | -2.6806 | -2.6636 |
- | 0.0095 | 2.43 | 4700 | 0.4205 | -1.0027 | -9.8306 | 0.8660 | 8.8279 | -324.0402 | -257.8415 | -2.6817 | -2.6589 |
- | 0.0345 | 2.48 | 4800 | 0.4206 | -1.0960 | -10.0555 | 0.8680 | 8.9595 | -326.2886 | -258.7745 | -2.7313 | -2.7052 |
- | 0.013 | 2.53 | 4900 | 0.4236 | -1.1580 | -10.1711 | 0.8680 | 9.0131 | -327.4450 | -259.3949 | -2.7044 | -2.6821 |
- | 0.0097 | 2.58 | 5000 | 0.4076 | -1.0887 | -9.9938 | 0.8700 | 8.9052 | -325.6721 | -258.7013 | -2.7533 | -2.7272 |
- | 0.0032 | 2.63 | 5100 | 0.4084 | -1.0632 | -10.0314 | 0.8700 | 8.9682 | -326.0481 | -258.4468 | -2.7277 | -2.7032 |
- | 0.0072 | 2.68 | 5200 | 0.4145 | -1.2339 | -10.3018 | 0.8700 | 9.0678 | -328.7514 | -260.1539 | -2.7199 | -2.6952 |
- | 0.0012 | 2.74 | 5300 | 0.4163 | -1.1418 | -10.3024 | 0.8680 | 9.1605 | -328.7574 | -259.2326 | -2.7094 | -2.6844 |
- | 0.0098 | 2.79 | 5400 | 0.4212 | -1.1635 | -10.3961 | 0.8640 | 9.2326 | -329.6949 | -259.4492 | -2.7279 | -2.6969 |
- | 0.0277 | 2.84 | 5500 | 0.4179 | -1.2864 | -10.4955 | 0.8720 | 9.2091 | -330.6889 | -260.6785 | -2.7285 | -2.6942 |
- | 0.0048 | 2.89 | 5600 | 0.4178 | -1.3095 | -10.5357 | 0.8680 | 9.2262 | -331.0909 | -260.9099 | -2.7274 | -2.6932 |
- | 0.0041 | 2.94 | 5700 | 0.4141 | -1.2063 | -10.4035 | 0.8680 | 9.1972 | -329.7684 | -259.8775 | -2.7293 | -2.6959 |
- | 0.0014 | 2.99 | 5800 | 0.4131 | -1.1597 | -10.3577 | 0.8700 | 9.1980 | -329.3108 | -259.4119 | -2.7301 | -2.6969 |
+ | 0.6163 | 0.05 | 100 | 0.6102 | 0.4041 | 0.1032 | 0.6581 | 0.3009 | -268.5007 | -310.8200 | -2.9269 | -2.9344 |
+ | 0.5771 | 0.1 | 200 | 0.5582 | 0.3265 | -0.3378 | 0.7243 | 0.6642 | -272.9109 | -311.5964 | -2.9032 | -2.9106 |
+ | 0.586 | 0.15 | 300 | 0.5412 | 0.1738 | -0.5739 | 0.7059 | 0.7477 | -275.2723 | -313.1228 | -2.9433 | -2.9511 |
+ | 0.5813 | 0.2 | 400 | 0.5903 | 0.6425 | -0.3995 | 0.7169 | 1.0420 | -273.5283 | -308.4358 | -2.9390 | -2.9467 |
+ | 0.5532 | 0.24 | 500 | 0.5900 | 0.0572 | -0.9069 | 0.7132 | 0.9641 | -278.6020 | -314.2893 | -2.9779 | -2.9873 |
+ | 0.8947 | 0.29 | 600 | 0.6950 | -0.1311 | -0.9980 | 0.7040 | 0.8669 | -279.5131 | -316.1719 | -2.9452 | -2.9646 |
+ | 0.6725 | 0.34 | 700 | 0.6230 | 0.4265 | -0.8118 | 0.6930 | 1.2384 | -277.6516 | -310.5957 | -3.0431 | -3.0603 |
+ | 0.5614 | 0.39 | 800 | 0.6195 | 0.3130 | -0.9523 | 0.7279 | 1.2653 | -279.0566 | -311.7314 | -3.0410 | -3.0701 |
+ | 0.6018 | 0.44 | 900 | 0.5879 | 0.1986 | -1.1034 | 0.7316 | 1.3020 | -280.5670 | -312.8752 | -2.9048 | -2.9212 |
+ | 0.5223 | 0.49 | 1000 | 0.5430 | 0.0003 | -1.3865 | 0.7463 | 1.3868 | -283.3980 | -314.8583 | -3.0152 | -3.0240 |
+ | 0.5991 | 0.54 | 1100 | 0.5443 | 0.3420 | -1.1651 | 0.7298 | 1.5071 | -281.1841 | -311.4412 | -2.9692 | -2.9694 |
+ | 0.6307 | 0.59 | 1200 | 0.5672 | 0.2669 | -1.3556 | 0.7188 | 1.6225 | -283.0891 | -312.1918 | -3.0108 | -3.0177 |
+ | 0.5154 | 0.64 | 1300 | 0.5394 | 0.2294 | -1.4459 | 0.7482 | 1.6753 | -283.9921 | -312.5673 | -2.9960 | -2.9995 |
+ | 0.6424 | 0.68 | 1400 | 0.5720 | 0.1759 | -1.3868 | 0.7261 | 1.5627 | -283.4010 | -313.1016 | -2.8801 | -2.8847 |
+ | 0.587 | 0.73 | 1500 | 0.5491 | 0.0712 | -1.5333 | 0.7224 | 1.6045 | -284.8664 | -314.1490 | -2.8348 | -2.8401 |
+ | 0.5185 | 0.78 | 1600 | 0.5475 | 0.5502 | -1.0613 | 0.7555 | 1.6115 | -280.1465 | -309.3589 | -2.8407 | -2.8521 |
+ | 0.6767 | 0.83 | 1700 | 0.5471 | 0.2562 | -1.4091 | 0.7574 | 1.6652 | -283.6239 | -312.2993 | -2.8209 | -2.8255 |
+ | 0.48 | 0.88 | 1800 | 0.5310 | 0.3013 | -1.4781 | 0.7555 | 1.7794 | -284.3137 | -311.8475 | -2.8333 | -2.8411 |
+ | 0.5443 | 0.93 | 1900 | 0.5323 | 0.3830 | -1.5307 | 0.7702 | 1.9137 | -284.8400 | -311.0307 | -2.8435 | -2.8521 |
+ | 0.5506 | 0.98 | 2000 | 0.5222 | -0.1696 | -2.0960 | 0.7537 | 1.9264 | -290.4926 | -316.5567 | -2.8470 | -2.8491 |
+ | 0.1276 | 1.03 | 2100 | 0.5308 | -0.0587 | -2.3160 | 0.7537 | 2.2573 | -292.6928 | -315.4478 | -2.8374 | -2.8447 |
+ | 0.1082 | 1.07 | 2200 | 0.5518 | -0.1785 | -2.5136 | 0.7647 | 2.3351 | -294.6690 | -316.6463 | -2.8418 | -2.8504 |
+ | 0.1003 | 1.12 | 2300 | 0.5922 | -0.2226 | -2.7697 | 0.7610 | 2.5471 | -297.2297 | -317.0865 | -2.8248 | -2.8383 |
+ | 0.129 | 1.17 | 2400 | 0.5731 | -0.1264 | -2.7407 | 0.7665 | 2.6143 | -296.9401 | -316.1251 | -2.8390 | -2.8419 |
+ | 0.1052 | 1.22 | 2500 | 0.5872 | -0.1003 | -2.7871 | 0.7684 | 2.6868 | -297.4046 | -315.8643 | -2.8816 | -2.8835 |
+ | 0.0895 | 1.27 | 2600 | 0.6246 | -0.1075 | -2.8772 | 0.7610 | 2.7697 | -298.3056 | -315.9363 | -2.8468 | -2.8509 |
+ | 0.0801 | 1.32 | 2700 | 0.5785 | -0.8504 | -3.7473 | 0.7721 | 2.8969 | -307.0061 | -323.3646 | -2.7997 | -2.7980 |
+ | 0.1012 | 1.37 | 2800 | 0.5710 | -0.4670 | -3.4414 | 0.7849 | 2.9745 | -303.9475 | -319.5309 | -2.7950 | -2.7899 |
+ | 0.0899 | 1.42 | 2900 | 0.5645 | -0.9008 | -3.8628 | 0.7702 | 2.9621 | -308.1616 | -323.8686 | -2.8207 | -2.8211 |
+ | 0.1434 | 1.47 | 3000 | 0.5710 | -0.6881 | -3.3126 | 0.7702 | 2.6245 | -302.6590 | -321.7423 | -2.8740 | -2.8758 |
+ | 0.1172 | 1.51 | 3100 | 0.5433 | -1.1037 | -3.7443 | 0.7849 | 2.6405 | -306.9757 | -325.8981 | -2.8496 | -2.8471 |
+ | 0.0997 | 1.56 | 3200 | 0.5483 | -0.8675 | -3.6998 | 0.7757 | 2.8323 | -306.5312 | -323.5364 | -2.8201 | -2.8260 |
+ | 0.0793 | 1.61 | 3300 | 0.5521 | -0.3552 | -3.2334 | 0.7886 | 2.8782 | -301.8674 | -318.4130 | -2.8553 | -2.8636 |
+ | 0.0706 | 1.66 | 3400 | 0.5406 | -0.4625 | -3.2643 | 0.7702 | 2.8017 | -302.1759 | -319.4865 | -2.8646 | -2.8763 |
+ | 0.115 | 1.71 | 3500 | 0.5674 | -0.8059 | -3.5663 | 0.7739 | 2.7604 | -305.1960 | -322.9196 | -2.8838 | -2.8907 |
+ | 0.1311 | 1.76 | 3600 | 0.5627 | -0.8274 | -3.7111 | 0.7721 | 2.8837 | -306.6445 | -323.1350 | -2.9513 | -2.9536 |
+ | 0.1318 | 1.81 | 3700 | 0.5681 | -0.8221 | -3.7147 | 0.7757 | 2.8926 | -306.6800 | -323.0818 | -2.9496 | -2.9519 |
+ | 0.0986 | 1.86 | 3800 | 0.5459 | -0.9712 | -3.7337 | 0.7868 | 2.7625 | -306.8705 | -324.5732 | -2.9532 | -2.9521 |
+ | 0.1091 | 1.91 | 3900 | 0.5304 | -0.7304 | -3.5666 | 0.7978 | 2.8362 | -305.1989 | -322.1651 | -2.9126 | -2.9083 |
+ | 0.0919 | 1.95 | 4000 | 0.5359 | -0.7072 | -3.6188 | 0.7941 | 2.9116 | -305.7208 | -321.9329 | -2.9641 | -2.9594 |
+ | 0.0389 | 2.0 | 4100 | 0.5431 | -0.8382 | -3.8823 | 0.7960 | 3.0441 | -308.3558 | -323.2430 | -2.9283 | -2.9238 |
+ | 0.0056 | 2.05 | 4200 | 0.5895 | -1.3165 | -4.8364 | 0.7996 | 3.5199 | -317.8972 | -328.0264 | -2.9077 | -2.9064 |
+ | 0.0192 | 2.1 | 4300 | 0.6121 | -1.7383 | -5.4734 | 0.7868 | 3.7351 | -324.2670 | -332.2442 | -2.9048 | -2.9052 |
+ | 0.0241 | 2.15 | 4400 | 0.6286 | -1.9652 | -5.8420 | 0.7812 | 3.8768 | -327.9529 | -334.5132 | -2.8892 | -2.8890 |
+ | 0.0091 | 2.2 | 4500 | 0.6283 | -2.1645 | -6.2079 | 0.7849 | 4.0434 | -331.6122 | -336.5059 | -2.8420 | -2.8453 |
+ | 0.0493 | 2.25 | 4600 | 0.6470 | -2.2950 | -6.4230 | 0.7776 | 4.1281 | -333.7635 | -337.8107 | -2.8227 | -2.8274 |
+ | 0.0155 | 2.3 | 4700 | 0.6362 | -2.0410 | -6.0829 | 0.7849 | 4.0419 | -330.3618 | -335.2705 | -2.8404 | -2.8459 |
+ | 0.0061 | 2.34 | 4800 | 0.6448 | -2.1377 | -6.2282 | 0.7868 | 4.0905 | -331.8155 | -336.2385 | -2.8279 | -2.8331 |
+ | 0.0166 | 2.39 | 4900 | 0.6481 | -1.9867 | -6.0035 | 0.7868 | 4.0168 | -329.5686 | -334.7283 | -2.8678 | -2.8704 |
+ | 0.0122 | 2.44 | 5000 | 0.6697 | -2.3651 | -6.6009 | 0.7757 | 4.2359 | -335.5422 | -338.5115 | -2.8242 | -2.8250 |
+ | 0.0105 | 2.49 | 5100 | 0.6560 | -2.1710 | -6.3487 | 0.7812 | 4.1777 | -333.0203 | -336.5708 | -2.8010 | -2.8055 |
+ | 0.0135 | 2.54 | 5200 | 0.6586 | -2.1484 | -6.3377 | 0.7794 | 4.1893 | -332.9100 | -336.3448 | -2.7855 | -2.7913 |
+ | 0.0134 | 2.59 | 5300 | 0.6482 | -2.1512 | -6.2937 | 0.7757 | 4.1424 | -332.4699 | -336.3733 | -2.7916 | -2.7951 |
+ | 0.0121 | 2.64 | 5400 | 0.6488 | -2.0332 | -6.2404 | 0.7776 | 4.2072 | -331.9375 | -335.1934 | -2.7945 | -2.7988 |
+ | 0.0201 | 2.69 | 5500 | 0.6545 | -2.1552 | -6.3595 | 0.7739 | 4.2043 | -333.1280 | -336.4128 | -2.7999 | -2.8030 |
+ | 0.0067 | 2.74 | 5600 | 0.6635 | -2.3473 | -6.5838 | 0.7757 | 4.2364 | -335.3709 | -338.3344 | -2.8061 | -2.8096 |
+ | 0.0061 | 2.78 | 5700 | 0.6532 | -2.2098 | -6.4082 | 0.7812 | 4.1985 | -333.6156 | -336.9589 | -2.8122 | -2.8148 |
+ | 0.0094 | 2.83 | 5800 | 0.6483 | -2.2624 | -6.4509 | 0.7794 | 4.1886 | -334.0426 | -337.4847 | -2.8032 | -2.8063 |
+ | 0.0128 | 2.88 | 5900 | 0.6510 | -2.3776 | -6.5483 | 0.7831 | 4.1706 | -335.0157 | -338.6371 | -2.7903 | -2.7943 |
+ | 0.0061 | 2.93 | 6000 | 0.6547 | -2.4427 | -6.6537 | 0.7812 | 4.2110 | -336.0696 | -339.2876 | -2.7825 | -2.7878 |
+ | 0.0082 | 2.98 | 6100 | 0.6570 | -2.4834 | -6.7156 | 0.7812 | 4.2322 | -336.6893 | -339.6953 | -2.7804 | -2.7861 |
 
 
 ### Framework versions
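The summary numbers above are internally consistent: with the DPO-style reward accounting these metric names suggest (the card itself doesn't say so, but the Rewards/ and Logps/ columns match that setup), Rewards/margins is Rewards/chosen minus Rewards/rejected. A minimal sketch checking both the removed and the added card summaries; the values are copied from the diff, the loop is ours:

```python
# Sanity check: Rewards/margins = Rewards/chosen - Rewards/rejected.
# Triples are (chosen, rejected, reported margin) from the card summaries.
for chosen, rejected, margin in [
    (-1.1629, -10.3565, 9.1936),  # removed (old) summary
    (-2.4796, -6.7098, 4.2302),   # added (new) summary
]:
    assert abs((chosen - rejected) - margin) < 1e-3
    print(f"chosen - rejected = {chosen - rejected:.4f}, reported {margin:.4f}")
```

Both identities hold exactly at the card's four-decimal precision.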
all_results.json CHANGED
@@ -1,21 +1,21 @@
 {
 "epoch": 3.0,
- "eval_logits/chosen": -2.6962802410125732,
- "eval_logits/rejected": -2.729593276977539,
- "eval_logps/chosen": -259.44342041015625,
- "eval_logps/rejected": -329.2986755371094,
- "eval_loss": 0.4134460389614105,
- "eval_rewards/accuracies": 0.8679999709129333,
- "eval_rewards/chosen": -1.16289222240448,
- "eval_rewards/margins": 9.193593978881836,
- "eval_rewards/rejected": -10.356484413146973,
- "eval_runtime": 276.1856,
- "eval_samples": 2000,
- "eval_samples_per_second": 7.242,
- "eval_steps_per_second": 0.453,
- "train_loss": 0.1539872911558545,
- "train_runtime": 74041.8418,
- "train_samples": 61966,
- "train_samples_per_second": 2.511,
- "train_steps_per_second": 0.078
+ "eval_logits/chosen": -2.785656690597534,
+ "eval_logits/rejected": -2.7799313068389893,
+ "eval_logps/chosen": -339.6573486328125,
+ "eval_logps/rejected": -336.6309509277344,
+ "eval_loss": 0.6578378677368164,
+ "eval_rewards/accuracies": 0.7849264740943909,
+ "eval_rewards/chosen": -2.4796314239501953,
+ "eval_rewards/margins": 4.230152130126953,
+ "eval_rewards/rejected": -6.709782600402832,
+ "eval_runtime": 303.5228,
+ "eval_samples": 2172,
+ "eval_samples_per_second": 7.156,
+ "eval_steps_per_second": 0.448,
+ "train_loss": 0.23823042969992683,
+ "train_runtime": 80075.9645,
+ "train_samples": 65498,
+ "train_samples_per_second": 2.454,
+ "train_steps_per_second": 0.077
 }
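The new throughput fields can be re-derived from the other entries in the same file: training processes epoch × train_samples examples over train_runtime seconds, while eval makes a single pass over eval_samples. A quick arithmetic check, with the constants copied from the JSON above:

```python
# Re-derive the throughput entries of all_results.json from its own fields.
epoch, train_samples, train_runtime = 3.0, 65498, 80075.9645
eval_samples, eval_runtime = 2172, 303.5228

assert round(epoch * train_samples / train_runtime, 3) == 2.454  # train_samples_per_second
assert round(eval_samples / eval_runtime, 3) == 7.156            # eval_samples_per_second
```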
eval_results.json CHANGED
@@ -1,16 +1,16 @@
 {
 "epoch": 3.0,
- "eval_logits/chosen": -2.6962802410125732,
- "eval_logits/rejected": -2.729593276977539,
- "eval_logps/chosen": -259.44342041015625,
- "eval_logps/rejected": -329.2986755371094,
- "eval_loss": 0.4134460389614105,
- "eval_rewards/accuracies": 0.8679999709129333,
- "eval_rewards/chosen": -1.16289222240448,
- "eval_rewards/margins": 9.193593978881836,
- "eval_rewards/rejected": -10.356484413146973,
- "eval_runtime": 276.1856,
- "eval_samples": 2000,
- "eval_samples_per_second": 7.242,
- "eval_steps_per_second": 0.453
+ "eval_logits/chosen": -2.785656690597534,
+ "eval_logits/rejected": -2.7799313068389893,
+ "eval_logps/chosen": -339.6573486328125,
+ "eval_logps/rejected": -336.6309509277344,
+ "eval_loss": 0.6578378677368164,
+ "eval_rewards/accuracies": 0.7849264740943909,
+ "eval_rewards/chosen": -2.4796314239501953,
+ "eval_rewards/margins": 4.230152130126953,
+ "eval_rewards/rejected": -6.709782600402832,
+ "eval_runtime": 303.5228,
+ "eval_samples": 2172,
+ "eval_samples_per_second": 7.156,
+ "eval_steps_per_second": 0.448
 }
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:a4c0c36a8579acc76ed11154580253619f7df454e118be9d39594556b4079f21
+ oid sha256:af8b495a095a9dee7ba10a4b15bedfa6a0718b2cd6c735da1409b22509fd83e7
 size 4943162336
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:8db2646026ac033fb9309238d1cbdfd2cf5009cd414c9bce2ed282e7b390c387
+ oid sha256:49b12ada264b41b5c09f42e7d971842fc36a8aedf26dd403a7c3e8036f221192
 size 4999819336
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:9f3daf8bd91b85a9eac71b5aa0144774bf465860d858b2ead3b2488cf5fa7f52
+ oid sha256:353c13b7eae5ae59900e44c7137185450d73c137f9af5a9d964eb40e54a585f8
 size 4540516344
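Each shard entry above is a Git LFS pointer rather than the weights themselves; only the oid changed while the sizes stayed the same, so the weights were rewritten in place. Since the oid is the SHA-256 of the file's contents, a downloaded shard can be verified against its pointer. A minimal sketch; the local file path is an assumption:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks and return the hex SHA-256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

# New oid for the first shard, copied from the pointer diff above.
expected = "af8b495a095a9dee7ba10a4b15bedfa6a0718b2cd6c735da1409b22509fd83e7"
assert sha256_of("model-00001-of-00003.safetensors") == expected
```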
runs/Jan08_01-18-55_babel-5-7/events.out.tfevents.1704694906.babel-5-7.2930375.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f899173c552e395b8db2c2ce760177681d84a632b84d0acca8446cb8c8546216
+ size 439587
runs/Jan08_01-18-55_babel-5-7/events.out.tfevents.1704775285.babel-5-7.2930375.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbb952e3ed4bcd5114c82239d34855f45cd9ca9ae291155922ab590066395f9f
+ size 828
train_results.json CHANGED
@@ -1,8 +1,8 @@
 {
 "epoch": 3.0,
- "train_loss": 0.1539872911558545,
- "train_runtime": 74041.8418,
- "train_samples": 61966,
- "train_samples_per_second": 2.511,
- "train_steps_per_second": 0.078
+ "train_loss": 0.23823042969992683,
+ "train_runtime": 80075.9645,
+ "train_samples": 65498,
+ "train_samples_per_second": 2.454,
+ "train_steps_per_second": 0.077
 }
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:7d1c034b9307aebcdf4e489668a8dab5257c9a83cf3501117527ce02402cbfd6
- size 5752
+ oid sha256:2f498982c816f77c4a028605f52db4687309be01541cc29774c6624b9063e658
+ size 5688
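training_args.bin also changed, shrinking from 5752 to 5688 bytes. The Hugging Face Trainer writes this file with torch.save, so the recorded hyperparameters can be inspected by unpickling it. A best-effort sketch, assuming a transformers install compatible with the one that wrote the file:

```python
import torch

# Unpickle the saved TrainingArguments; weights_only=False is required
# because this is an arbitrary pickled object, not a tensor state dict.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```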