Commit 8018296 committed by Abinesh Sivakumar
Parent(s): eb54c05

Delete https:

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete change set.
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/trainer_state.json +0 -319
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/trainer_state.json +0 -469
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/trainer_state.json +0 -619
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/trainer_state.json +0 -769
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/trainer_state.json +0 -919
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/adapter_config.json +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/adapter_model.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/optimizer.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/rng_state.pth +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/scheduler.pt +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/trainer_state.json +0 -1069
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/training_args.bin +0 -3
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-4000/README.md +0 -21
- https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-4000/adapter_config.json +0 -21
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/README.md
DELETED
@@ -1,21 +0,0 @@
----
-library_name: peft
----
-## Training procedure
-
-
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: QuantizationMethod.BITS_AND_BYTES
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-### Framework versions
-
-
-- PEFT 0.4.0.dev0
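The deleted README lists the bitsandbytes settings as flat key-value pairs. As a minimal sketch (the repo's actual training script is not part of this commit), the same configuration would be expressed with transformers' `BitsAndBytesConfig` roughly as follows, using the base model named in adapter_config.json below:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirrors the quantization settings in the deleted README:
# 4-bit NF4 with double quantization and bfloat16 compute.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    llm_int8_threshold=6.0,  # listed in the README, but unused in 4-bit mode
)

model = AutoModelForCausalLM.from_pretrained(
    "codellama/CodeLlama-13b-Instruct-hf",
    quantization_config=bnb_config,
    device_map="auto",
)
```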
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
-{
-  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 32,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "k_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}
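The deleted adapter_config.json maps field-for-field onto peft's `LoraConfig`: a rank-8 LoRA over the attention q/k/v projections. A sketch of the equivalent construction, assuming peft's standard API:

```python
from peft import LoraConfig, TaskType, get_peft_model

# Field-for-field equivalent of the deleted adapter_config.json.
lora_config = LoraConfig(
    r=8,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    target_modules=["q_proj", "k_proj", "v_proj"],
    task_type=TaskType.CAUSAL_LM,
)

# `model` would be the 4-bit base model loaded as sketched above:
# peft_model = get_peft_model(model, lora_config)
```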
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f914b0990947e8af0d7abfcde78e4512e8b5bc4e639b4f9b095212d03a40e7f2
-size 39407821
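The .bin/.pt entries above and below are Git LFS pointer files, not the weights themselves: each stores only the spec version, the SHA-256 of the real blob, and its byte size (about 39 MB here for the rank-8 adapter). A minimal sketch of reading such a pointer, assuming the three-line format shown:

```python
def parse_lfs_pointer(text: str) -> dict:
    # Each pointer is three "key value" lines:
    # version <spec URL>, oid sha256:<hash>, size <bytes>.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:f914b0990947e8af0d7abfcde78e4512e8b5bc4e639b4f9b095212d03a40e7f2
size 39407821"""
assert parse_lfs_pointer(pointer)["size_bytes"] == 39407821
```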
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7d38567d868c29aaa39241ddaf2fde670173ed97a39499456176416b39413be8
-size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:898a6948390b5093738fb7b2c97359379a9b2ed8cb45fd6bf5d632aea978f1a3
-size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:23efe624b19e546b381a09029c8ee3430909ac7bb2ed04e2f93a318e70a679ed
-size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/trainer_state.json
DELETED
@@ -1,319 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.1018109624953867,
-  "eval_steps": 500,
-  "global_step": 1000,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.0,
-      "learning_rate": 4e-05,
-      "loss": 3.5766,
-      "step": 20
-    },
-    {
-      "epoch": 0.0,
-      "learning_rate": 8e-05,
-      "loss": 2.9038,
-      "step": 40
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00012,
-      "loss": 1.9072,
-      "step": 60
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00016,
-      "loss": 1.7359,
-      "step": 80
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.0002,
-      "loss": 1.5349,
-      "step": 100
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019958856202427486,
-      "loss": 1.4908,
-      "step": 120
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019917712404854968,
-      "loss": 1.4931,
-      "step": 140
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019876568607282453,
-      "loss": 1.4201,
-      "step": 160
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019835424809709937,
-      "loss": 1.4541,
-      "step": 180
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019794281012137422,
-      "loss": 1.4102,
-      "step": 200
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019753137214564904,
-      "loss": 1.3861,
-      "step": 220
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.0001971199341699239,
-      "loss": 1.3939,
-      "step": 240
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019670849619419874,
-      "loss": 1.3883,
-      "step": 260
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019629705821847356,
-      "loss": 1.3257,
-      "step": 280
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001958856202427484,
-      "loss": 1.386,
-      "step": 300
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019547418226702326,
-      "loss": 1.3746,
-      "step": 320
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001950627442912981,
-      "loss": 1.3266,
-      "step": 340
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019465130631557293,
-      "loss": 1.3591,
-      "step": 360
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019423986833984777,
-      "loss": 1.3464,
-      "step": 380
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019382843036412262,
-      "loss": 1.3573,
-      "step": 400
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019341699238839744,
-      "loss": 1.3455,
-      "step": 420
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.0001930055544126723,
-      "loss": 1.3223,
-      "step": 440
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019259411643694714,
-      "loss": 1.3293,
-      "step": 460
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.000192182678461222,
-      "loss": 1.3033,
-      "step": 480
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001917712404854968,
-      "loss": 1.3323,
-      "step": 500
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019135980250977166,
-      "loss": 1.2838,
-      "step": 520
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001909483645340465,
-      "loss": 1.332,
-      "step": 540
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019053692655832133,
-      "loss": 1.3118,
-      "step": 560
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019012548858259617,
-      "loss": 1.3237,
-      "step": 580
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018971405060687102,
-      "loss": 1.2541,
-      "step": 600
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018930261263114587,
-      "loss": 1.28,
-      "step": 620
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001888911746554207,
-      "loss": 1.231,
-      "step": 640
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018847973667969554,
-      "loss": 1.2676,
-      "step": 660
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001880682987039704,
-      "loss": 1.2909,
-      "step": 680
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001876568607282452,
-      "loss": 1.2499,
-      "step": 700
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018724542275252006,
-      "loss": 1.2679,
-      "step": 720
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.0001868339847767949,
-      "loss": 1.2674,
-      "step": 740
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018642254680106975,
-      "loss": 1.2736,
-      "step": 760
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018601110882534457,
-      "loss": 1.2843,
-      "step": 780
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018559967084961942,
-      "loss": 1.281,
-      "step": 800
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018518823287389427,
-      "loss": 1.3699,
-      "step": 820
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001847767948981691,
-      "loss": 1.2705,
-      "step": 840
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018436535692244394,
-      "loss": 1.2279,
-      "step": 860
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001839539189467188,
-      "loss": 1.2779,
-      "step": 880
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018354248097099364,
-      "loss": 1.2086,
-      "step": 900
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018313104299526846,
-      "loss": 1.2999,
-      "step": 920
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.0001827196050195433,
-      "loss": 1.2503,
-      "step": 940
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018230816704381815,
-      "loss": 1.2466,
-      "step": 960
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018189672906809297,
-      "loss": 1.2113,
-      "step": 980
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018148529109236782,
-      "loss": 1.2356,
-      "step": 1000
-    }
-  ],
-  "logging_steps": 20,
-  "max_steps": 9822,
-  "num_train_epochs": 1,
-  "save_steps": 500,
-  "total_flos": 1.45080101465088e+16,
-  "trial_name": null,
-  "trial_params": null
-}
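The learning-rate column in the deleted trainer_state.json follows a recognizable shape: it climbs linearly to the 2e-4 peak over the first 100 steps, then decays linearly toward zero at max_steps = 9822. A sketch that reproduces the logged values under that assumption (the 100-step warmup is inferred from the first five entries; it is not stated in the file):

```python
def lr_at(step: int, peak: float = 2e-4, warmup: int = 100, max_steps: int = 9822) -> float:
    # Linear warmup to the peak, then linear decay to zero at max_steps,
    # i.e. the shape of transformers' default "linear" schedule.
    if step <= warmup:
        return peak * step / warmup
    return peak * (max_steps - step) / (max_steps - warmup)

# Spot-checks against the deleted log:
assert abs(lr_at(20) - 4e-05) < 1e-12
assert abs(lr_at(120) - 0.00019958856202427486) < 1e-12
assert abs(lr_at(1000) - 0.00018148529109236782) < 1e-12
```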
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1000/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
-size 4027
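training_args.bin is the pickled TrainingArguments object that the Trainer stores next to every checkpoint; note that the same oid (8819fc08...) recurs for every checkpoint in this commit, so all of them carried an identical copy. A sketch of inspecting one, assuming a local download and that unpickling is acceptable (torch.load runs pickle, so only do this with files you trust):

```python
import torch

# TrainingArguments is arbitrary pickled Python, not a tensor archive,
# so weights_only=False is required on recent PyTorch versions.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.logging_steps, args.save_steps)
```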
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/README.md
DELETED
@@ -1,21 +0,0 @@
----
-library_name: peft
----
-## Training procedure
-
-
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: QuantizationMethod.BITS_AND_BYTES
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-### Framework versions
-
-
-- PEFT 0.4.0.dev0
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
-{
-  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 32,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "k_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4c6b0831fc2d11fac7dd78434203aefcffbfbc4a3abe53ba55fc1832f3e0a656
-size 39407821
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:db850f3db5ddc9d314504fbc945e2f5f38dc9904f7041be2d2b8b2258bdb5292
-size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0184552a92e3eea244ca8525508faa432cc2ef3b3619da07af22a8f830deedd7
-size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:39ecefbbaaea1fa61a9e8640351957e5ab0b16bcd5b68bc8acb5a1ae782aa095
-size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/trainer_state.json
DELETED
@@ -1,469 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.15271644374308005,
-  "eval_steps": 500,
-  "global_step": 1500,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.0,
-      "learning_rate": 4e-05,
-      "loss": 3.5766,
-      "step": 20
-    },
-    {
-      "epoch": 0.0,
-      "learning_rate": 8e-05,
-      "loss": 2.9038,
-      "step": 40
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00012,
-      "loss": 1.9072,
-      "step": 60
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00016,
-      "loss": 1.7359,
-      "step": 80
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.0002,
-      "loss": 1.5349,
-      "step": 100
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019958856202427486,
-      "loss": 1.4908,
-      "step": 120
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019917712404854968,
-      "loss": 1.4931,
-      "step": 140
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019876568607282453,
-      "loss": 1.4201,
-      "step": 160
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019835424809709937,
-      "loss": 1.4541,
-      "step": 180
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019794281012137422,
-      "loss": 1.4102,
-      "step": 200
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019753137214564904,
-      "loss": 1.3861,
-      "step": 220
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.0001971199341699239,
-      "loss": 1.3939,
-      "step": 240
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019670849619419874,
-      "loss": 1.3883,
-      "step": 260
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019629705821847356,
-      "loss": 1.3257,
-      "step": 280
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001958856202427484,
-      "loss": 1.386,
-      "step": 300
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019547418226702326,
-      "loss": 1.3746,
-      "step": 320
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001950627442912981,
-      "loss": 1.3266,
-      "step": 340
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019465130631557293,
-      "loss": 1.3591,
-      "step": 360
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019423986833984777,
-      "loss": 1.3464,
-      "step": 380
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019382843036412262,
-      "loss": 1.3573,
-      "step": 400
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019341699238839744,
-      "loss": 1.3455,
-      "step": 420
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.0001930055544126723,
-      "loss": 1.3223,
-      "step": 440
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019259411643694714,
-      "loss": 1.3293,
-      "step": 460
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.000192182678461222,
-      "loss": 1.3033,
-      "step": 480
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001917712404854968,
-      "loss": 1.3323,
-      "step": 500
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019135980250977166,
-      "loss": 1.2838,
-      "step": 520
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001909483645340465,
-      "loss": 1.332,
-      "step": 540
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019053692655832133,
-      "loss": 1.3118,
-      "step": 560
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019012548858259617,
-      "loss": 1.3237,
-      "step": 580
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018971405060687102,
-      "loss": 1.2541,
-      "step": 600
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018930261263114587,
-      "loss": 1.28,
-      "step": 620
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001888911746554207,
-      "loss": 1.231,
-      "step": 640
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018847973667969554,
-      "loss": 1.2676,
-      "step": 660
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001880682987039704,
-      "loss": 1.2909,
-      "step": 680
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001876568607282452,
-      "loss": 1.2499,
-      "step": 700
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018724542275252006,
-      "loss": 1.2679,
-      "step": 720
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.0001868339847767949,
-      "loss": 1.2674,
-      "step": 740
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018642254680106975,
-      "loss": 1.2736,
-      "step": 760
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018601110882534457,
-      "loss": 1.2843,
-      "step": 780
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018559967084961942,
-      "loss": 1.281,
-      "step": 800
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018518823287389427,
-      "loss": 1.3699,
-      "step": 820
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001847767948981691,
-      "loss": 1.2705,
-      "step": 840
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018436535692244394,
-      "loss": 1.2279,
-      "step": 860
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001839539189467188,
-      "loss": 1.2779,
-      "step": 880
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018354248097099364,
-      "loss": 1.2086,
-      "step": 900
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018313104299526846,
-      "loss": 1.2999,
-      "step": 920
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.0001827196050195433,
-      "loss": 1.2503,
-      "step": 940
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018230816704381815,
-      "loss": 1.2466,
-      "step": 960
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018189672906809297,
-      "loss": 1.2113,
-      "step": 980
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018148529109236782,
-      "loss": 1.2356,
-      "step": 1000
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018107385311664267,
-      "loss": 1.2631,
-      "step": 1020
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00018066241514091752,
-      "loss": 1.2443,
-      "step": 1040
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00018025097716519234,
-      "loss": 1.2406,
-      "step": 1060
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.0001798395391894672,
-      "loss": 1.2308,
-      "step": 1080
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00017942810121374204,
-      "loss": 1.2649,
-      "step": 1100
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00017901666323801686,
-      "loss": 1.2263,
-      "step": 1120
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.0001786052252622917,
-      "loss": 1.2869,
-      "step": 1140
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017819378728656655,
-      "loss": 1.2255,
-      "step": 1160
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.0001777823493108414,
-      "loss": 1.2596,
-      "step": 1180
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017737091133511622,
-      "loss": 1.2748,
-      "step": 1200
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017695947335939107,
-      "loss": 1.2587,
-      "step": 1220
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017654803538366592,
-      "loss": 1.2651,
-      "step": 1240
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017613659740794074,
-      "loss": 1.2659,
-      "step": 1260
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.0001757251594322156,
-      "loss": 1.2077,
-      "step": 1280
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017531372145649044,
-      "loss": 1.25,
-      "step": 1300
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017490228348076528,
-      "loss": 1.2149,
-      "step": 1320
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.0001744908455050401,
-      "loss": 1.2417,
-      "step": 1340
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017407940752931498,
-      "loss": 1.1939,
-      "step": 1360
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017366796955358983,
-      "loss": 1.2688,
-      "step": 1380
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017325653157786465,
-      "loss": 1.2287,
-      "step": 1400
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.0001728450936021395,
-      "loss": 1.2931,
-      "step": 1420
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017243365562641435,
-      "loss": 1.2695,
-      "step": 1440
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017202221765068917,
-      "loss": 1.2228,
-      "step": 1460
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017161077967496401,
-      "loss": 1.2419,
-      "step": 1480
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017119934169923886,
-      "loss": 1.2483,
-      "step": 1500
-    }
-  ],
-  "logging_steps": 20,
-  "max_steps": 9822,
-  "num_train_epochs": 1,
-  "save_steps": 500,
-  "total_flos": 2.177523382889472e+16,
-  "trial_name": null,
-  "trial_params": null
-}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-1500/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
-size 4027
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/README.md
DELETED
@@ -1,21 +0,0 @@
----
-library_name: peft
----
-## Training procedure
-
-
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: QuantizationMethod.BITS_AND_BYTES
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-### Framework versions
-
-
-- PEFT 0.4.0.dev0
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
-{
-  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 32,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "k_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bf61645713ce00098c5c8cc926e5605bd03b1b707618486b06702b128bbe5eaf
-size 39407821
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5050b0af89198d5f8bd3cd59ea636fb02bda929e29ce98fa4ca2b838a733c6e9
-size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c95f00681f2f9ca9879f2b6557dcf0d12bcb73e1a027aa0fcdd2f0c5c7828aa9
-size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e3a1ccb9a0f957748e69d536bb952c4ccbae393eb0f6c8061358a6633020031a
-size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/trainer_state.json
DELETED
@@ -1,619 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.2036219249907734,
-  "eval_steps": 500,
-  "global_step": 2000,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.0,
-      "learning_rate": 4e-05,
-      "loss": 3.5766,
-      "step": 20
-    },
-    {
-      "epoch": 0.0,
-      "learning_rate": 8e-05,
-      "loss": 2.9038,
-      "step": 40
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00012,
-      "loss": 1.9072,
-      "step": 60
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00016,
-      "loss": 1.7359,
-      "step": 80
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.0002,
-      "loss": 1.5349,
-      "step": 100
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019958856202427486,
-      "loss": 1.4908,
-      "step": 120
-    },
-    {
-      "epoch": 0.01,
-      "learning_rate": 0.00019917712404854968,
-      "loss": 1.4931,
-      "step": 140
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019876568607282453,
-      "loss": 1.4201,
-      "step": 160
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019835424809709937,
-      "loss": 1.4541,
-      "step": 180
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019794281012137422,
-      "loss": 1.4102,
-      "step": 200
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.00019753137214564904,
-      "loss": 1.3861,
-      "step": 220
-    },
-    {
-      "epoch": 0.02,
-      "learning_rate": 0.0001971199341699239,
-      "loss": 1.3939,
-      "step": 240
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019670849619419874,
-      "loss": 1.3883,
-      "step": 260
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019629705821847356,
-      "loss": 1.3257,
-      "step": 280
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001958856202427484,
-      "loss": 1.386,
-      "step": 300
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.00019547418226702326,
-      "loss": 1.3746,
-      "step": 320
-    },
-    {
-      "epoch": 0.03,
-      "learning_rate": 0.0001950627442912981,
-      "loss": 1.3266,
-      "step": 340
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019465130631557293,
-      "loss": 1.3591,
-      "step": 360
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019423986833984777,
-      "loss": 1.3464,
-      "step": 380
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019382843036412262,
-      "loss": 1.3573,
-      "step": 400
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.00019341699238839744,
-      "loss": 1.3455,
-      "step": 420
-    },
-    {
-      "epoch": 0.04,
-      "learning_rate": 0.0001930055544126723,
-      "loss": 1.3223,
-      "step": 440
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019259411643694714,
-      "loss": 1.3293,
-      "step": 460
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.000192182678461222,
-      "loss": 1.3033,
-      "step": 480
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001917712404854968,
-      "loss": 1.3323,
-      "step": 500
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.00019135980250977166,
-      "loss": 1.2838,
-      "step": 520
-    },
-    {
-      "epoch": 0.05,
-      "learning_rate": 0.0001909483645340465,
-      "loss": 1.332,
-      "step": 540
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019053692655832133,
-      "loss": 1.3118,
-      "step": 560
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00019012548858259617,
-      "loss": 1.3237,
-      "step": 580
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018971405060687102,
-      "loss": 1.2541,
-      "step": 600
-    },
-    {
-      "epoch": 0.06,
-      "learning_rate": 0.00018930261263114587,
-      "loss": 1.28,
-      "step": 620
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001888911746554207,
-      "loss": 1.231,
-      "step": 640
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018847973667969554,
-      "loss": 1.2676,
-      "step": 660
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001880682987039704,
-      "loss": 1.2909,
-      "step": 680
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.0001876568607282452,
-      "loss": 1.2499,
-      "step": 700
-    },
-    {
-      "epoch": 0.07,
-      "learning_rate": 0.00018724542275252006,
-      "loss": 1.2679,
-      "step": 720
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.0001868339847767949,
-      "loss": 1.2674,
-      "step": 740
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018642254680106975,
-      "loss": 1.2736,
-      "step": 760
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018601110882534457,
-      "loss": 1.2843,
-      "step": 780
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018559967084961942,
-      "loss": 1.281,
-      "step": 800
-    },
-    {
-      "epoch": 0.08,
-      "learning_rate": 0.00018518823287389427,
-      "loss": 1.3699,
-      "step": 820
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001847767948981691,
-      "loss": 1.2705,
-      "step": 840
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018436535692244394,
-      "loss": 1.2279,
-      "step": 860
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.0001839539189467188,
-      "loss": 1.2779,
-      "step": 880
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018354248097099364,
-      "loss": 1.2086,
-      "step": 900
-    },
-    {
-      "epoch": 0.09,
-      "learning_rate": 0.00018313104299526846,
-      "loss": 1.2999,
-      "step": 920
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.0001827196050195433,
-      "loss": 1.2503,
-      "step": 940
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018230816704381815,
-      "loss": 1.2466,
-      "step": 960
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018189672906809297,
-      "loss": 1.2113,
-      "step": 980
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018148529109236782,
-      "loss": 1.2356,
-      "step": 1000
-    },
-    {
-      "epoch": 0.1,
-      "learning_rate": 0.00018107385311664267,
-      "loss": 1.2631,
-      "step": 1020
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00018066241514091752,
-      "loss": 1.2443,
-      "step": 1040
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00018025097716519234,
-      "loss": 1.2406,
-      "step": 1060
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.0001798395391894672,
-      "loss": 1.2308,
-      "step": 1080
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00017942810121374204,
-      "loss": 1.2649,
-      "step": 1100
-    },
-    {
-      "epoch": 0.11,
-      "learning_rate": 0.00017901666323801686,
-      "loss": 1.2263,
-      "step": 1120
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.0001786052252622917,
-      "loss": 1.2869,
-      "step": 1140
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017819378728656655,
-      "loss": 1.2255,
-      "step": 1160
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.0001777823493108414,
-      "loss": 1.2596,
-      "step": 1180
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017737091133511622,
-      "loss": 1.2748,
-      "step": 1200
-    },
-    {
-      "epoch": 0.12,
-      "learning_rate": 0.00017695947335939107,
-      "loss": 1.2587,
-      "step": 1220
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017654803538366592,
-      "loss": 1.2651,
-      "step": 1240
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017613659740794074,
-      "loss": 1.2659,
-      "step": 1260
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.0001757251594322156,
-      "loss": 1.2077,
-      "step": 1280
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017531372145649044,
-      "loss": 1.25,
-      "step": 1300
-    },
-    {
-      "epoch": 0.13,
-      "learning_rate": 0.00017490228348076528,
-      "loss": 1.2149,
-      "step": 1320
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.0001744908455050401,
-      "loss": 1.2417,
-      "step": 1340
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017407940752931498,
-      "loss": 1.1939,
-      "step": 1360
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017366796955358983,
-      "loss": 1.2688,
-      "step": 1380
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.00017325653157786465,
-      "loss": 1.2287,
-      "step": 1400
-    },
-    {
-      "epoch": 0.14,
-      "learning_rate": 0.0001728450936021395,
-      "loss": 1.2931,
-      "step": 1420
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017243365562641435,
-      "loss": 1.2695,
-      "step": 1440
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017202221765068917,
-      "loss": 1.2228,
-      "step": 1460
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017161077967496401,
-      "loss": 1.2419,
-      "step": 1480
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.00017119934169923886,
-      "loss": 1.2483,
-      "step": 1500
-    },
-    {
-      "epoch": 0.15,
-      "learning_rate": 0.0001707879037235137,
-      "loss": 1.2144,
-      "step": 1520
-    },
-    {
-      "epoch": 0.16,
-      "learning_rate": 0.00017037646574778853,
-      "loss": 1.2148,
-      "step": 1540
-    },
-    {
-      "epoch": 0.16,
-      "learning_rate": 0.00016996502777206338,
-      "loss": 1.2196,
-      "step": 1560
-    },
-    {
-      "epoch": 0.16,
-      "learning_rate": 0.00016955358979633823,
-      "loss": 1.2581,
-      "step": 1580
-    },
-    {
-      "epoch": 0.16,
-      "learning_rate": 0.00016914215182061305,
-      "loss": 1.2414,
-      "step": 1600
-    },
-    {
-      "epoch": 0.16,
-      "learning_rate": 0.0001687307138448879,
-      "loss": 1.2357,
-      "step": 1620
-    },
-    {
-      "epoch": 0.17,
-      "learning_rate": 0.00016831927586916274,
-      "loss": 1.22,
-      "step": 1640
-    },
-    {
-      "epoch": 0.17,
-      "learning_rate": 0.0001679078378934376,
-      "loss": 1.2234,
-      "step": 1660
-    },
-    {
-      "epoch": 0.17,
-      "learning_rate": 0.00016749639991771241,
-      "loss": 1.2291,
-      "step": 1680
-    },
-    {
-      "epoch": 0.17,
-      "learning_rate": 0.00016708496194198726,
-      "loss": 1.2344,
-      "step": 1700
-    },
-    {
-      "epoch": 0.18,
-      "learning_rate": 0.0001666735239662621,
-      "loss": 1.1987,
-      "step": 1720
-    },
-    {
-      "epoch": 0.18,
-      "learning_rate": 0.00016626208599053693,
-      "loss": 1.2232,
-      "step": 1740
-    },
-    {
-      "epoch": 0.18,
-      "learning_rate": 0.00016585064801481178,
-      "loss": 1.2402,
-      "step": 1760
-    },
-    {
-      "epoch": 0.18,
-      "learning_rate": 0.00016543921003908663,
-      "loss": 1.1475,
-      "step": 1780
-    },
-    {
-      "epoch": 0.18,
-      "learning_rate": 0.00016502777206336148,
-      "loss": 1.272,
-      "step": 1800
-    },
-    {
-      "epoch": 0.19,
-      "learning_rate": 0.0001646163340876363,
-      "loss": 1.2369,
-      "step": 1820
-    },
-    {
-      "epoch": 0.19,
-      "learning_rate": 0.00016420489611191114,
-      "loss": 1.212,
-      "step": 1840
-    },
-    {
-      "epoch": 0.19,
-      "learning_rate": 0.000163793458136186,
-      "loss": 1.2191,
-      "step": 1860
-    },
-    {
-      "epoch": 0.19,
-      "learning_rate": 0.00016338202016046081,
-      "loss": 1.1953,
-      "step": 1880
-    },
-    {
-      "epoch": 0.19,
-      "learning_rate": 0.00016297058218473566,
-      "loss": 1.2126,
-      "step": 1900
-    },
-    {
-      "epoch": 0.2,
-      "learning_rate": 0.0001625591442090105,
-      "loss": 1.2396,
-      "step": 1920
-    },
-    {
-      "epoch": 0.2,
-      "learning_rate": 0.00016214770623328536,
-      "loss": 1.1468,
-      "step": 1940
-    },
-    {
-      "epoch": 0.2,
-      "learning_rate": 0.00016173626825756018,
-      "loss": 1.1847,
-      "step": 1960
-    },
-    {
-      "epoch": 0.2,
-      "learning_rate": 0.00016132483028183503,
-      "loss": 1.2214,
-      "step": 1980
-    },
-    {
-      "epoch": 0.2,
-      "learning_rate": 0.00016091339230610988,
-      "loss": 1.2138,
-      "step": 2000
-    }
-  ],
-  "logging_steps": 20,
-  "max_steps": 9822,
-  "num_train_epochs": 1,
-  "save_steps": 500,
-  "total_flos": 2.908887907648512e+16,
-  "trial_name": null,
-  "trial_params": null
-}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2000/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
-size 4027
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/README.md
DELETED
@@ -1,21 +0,0 @@
---
library_name: peft
---
## Training procedure


The following `bitsandbytes` quantization config was used during training:
- quant_method: QuantizationMethod.BITS_AND_BYTES
- load_in_8bit: False
- load_in_4bit: True
- llm_int8_threshold: 6.0
- llm_int8_skip_modules: None
- llm_int8_enable_fp32_cpu_offload: False
- llm_int8_has_fp16_weight: False
- bnb_4bit_quant_type: nf4
- bnb_4bit_use_double_quant: True
- bnb_4bit_compute_dtype: bfloat16
### Framework versions


- PEFT 0.4.0.dev0
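The deleted README records the quantization settings as plain key/value pairs. The training script itself is not part of this diff, so the following is only a sketch of how the same settings map onto transformers' BitsAndBytesConfig:

```python
import torch
from transformers import BitsAndBytesConfig

# 4-bit NF4 quantization with double quantization and bfloat16 compute,
# mirroring the key/value pairs in the deleted README above.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    llm_int8_threshold=6.0,
)
```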
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
{
  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "revision": null,
  "target_modules": [
    "q_proj",
    "k_proj",
    "v_proj"
  ],
  "task_type": "CAUSAL_LM"
}
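adapter_config.json describes a small LoRA adapter (r=8, alpha 32, dropout 0.05) on the q/k/v attention projections of codellama/CodeLlama-13b-Instruct-hf. A sketch of the equivalent peft LoraConfig, reconstructed from the JSON above rather than taken from the training code:

```python
from peft import LoraConfig

# Reconstruction of the deleted adapter_config.json as a peft LoraConfig.
lora_config = LoraConfig(
    r=8,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
    target_modules=["q_proj", "k_proj", "v_proj"],
    task_type="CAUSAL_LM",
)
```

Assuming the usual CodeLlama-13b shape (40 decoder layers, hidden size 5120, full multi-head attention), rank-8 adapters on q_proj/k_proj/v_proj come to about 3 × 40 × 8 × (5120 + 5120) ≈ 9.8M trainable parameters, roughly 39 MB in fp32. That estimate is consistent with the 39,407,821-byte adapter_model.bin pointers below, and with the roughly twice-as-large optimizer.pt pointers if the optimizer keeps two moment buffers per parameter (Adam-style).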
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:73b3f2adc6f6374cde7c108b031e0e6e67250ba8ce32edff87c930ac2bc8a9b6
size 39407821
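Each checkpoint directory stores only the LoRA weights (adapter_model.bin) alongside adapter_config.json, so restoring a checkpoint means loading the base model and attaching the adapter. A minimal sketch, assuming the checkpoint has been downloaded to a local trainer_outputs/checkpoint-2500 directory (a hypothetical path); the 4-bit config sketched earlier could be passed to from_pretrained as well:

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "codellama/CodeLlama-13b-Instruct-hf"
adapter_dir = "trainer_outputs/checkpoint-2500"  # hypothetical local copy of the deleted checkpoint

tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")

# Attach the LoRA adapter saved in the checkpoint directory.
model = PeftModel.from_pretrained(model, adapter_dir)
model.eval()
```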
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eef06d7a537ff69f8b6e232bc2afa874c4db9e9d988185bd577ae9f68571c70c
size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3c809d18b15e0dc9b961ac3ae55d2aeb3ae3e9cb845b4d706a495fc7c322a705
size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0c679b8c55ca25ae4648665ff45322feeafc7eb782af9af7326fcad2a0a90886
size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/trainer_state.json
DELETED
@@ -1,769 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"best_metric": null,
|
3 |
-
"best_model_checkpoint": null,
|
4 |
-
"epoch": 0.2545274062384667,
|
5 |
-
"eval_steps": 500,
|
6 |
-
"global_step": 2500,
|
7 |
-
"is_hyper_param_search": false,
|
8 |
-
"is_local_process_zero": true,
|
9 |
-
"is_world_process_zero": true,
|
10 |
-
"log_history": [
|
11 |
-
{
|
12 |
-
"epoch": 0.0,
|
13 |
-
"learning_rate": 4e-05,
|
14 |
-
"loss": 3.5766,
|
15 |
-
"step": 20
|
16 |
-
},
|
17 |
-
{
|
18 |
-
"epoch": 0.0,
|
19 |
-
"learning_rate": 8e-05,
|
20 |
-
"loss": 2.9038,
|
21 |
-
"step": 40
|
22 |
-
},
|
23 |
-
{
|
24 |
-
"epoch": 0.01,
|
25 |
-
"learning_rate": 0.00012,
|
26 |
-
"loss": 1.9072,
|
27 |
-
"step": 60
|
28 |
-
},
|
29 |
-
{
|
30 |
-
"epoch": 0.01,
|
31 |
-
"learning_rate": 0.00016,
|
32 |
-
"loss": 1.7359,
|
33 |
-
"step": 80
|
34 |
-
},
|
35 |
-
{
|
36 |
-
"epoch": 0.01,
|
37 |
-
"learning_rate": 0.0002,
|
38 |
-
"loss": 1.5349,
|
39 |
-
"step": 100
|
40 |
-
},
|
41 |
-
{
|
42 |
-
"epoch": 0.01,
|
43 |
-
"learning_rate": 0.00019958856202427486,
|
44 |
-
"loss": 1.4908,
|
45 |
-
"step": 120
|
46 |
-
},
|
47 |
-
{
|
48 |
-
"epoch": 0.01,
|
49 |
-
"learning_rate": 0.00019917712404854968,
|
50 |
-
"loss": 1.4931,
|
51 |
-
"step": 140
|
52 |
-
},
|
53 |
-
{
|
54 |
-
"epoch": 0.02,
|
55 |
-
"learning_rate": 0.00019876568607282453,
|
56 |
-
"loss": 1.4201,
|
57 |
-
"step": 160
|
58 |
-
},
|
59 |
-
{
|
60 |
-
"epoch": 0.02,
|
61 |
-
"learning_rate": 0.00019835424809709937,
|
62 |
-
"loss": 1.4541,
|
63 |
-
"step": 180
|
64 |
-
},
|
65 |
-
{
|
66 |
-
"epoch": 0.02,
|
67 |
-
"learning_rate": 0.00019794281012137422,
|
68 |
-
"loss": 1.4102,
|
69 |
-
"step": 200
|
70 |
-
},
|
71 |
-
{
|
72 |
-
"epoch": 0.02,
|
73 |
-
"learning_rate": 0.00019753137214564904,
|
74 |
-
"loss": 1.3861,
|
75 |
-
"step": 220
|
76 |
-
},
|
77 |
-
{
|
78 |
-
"epoch": 0.02,
|
79 |
-
"learning_rate": 0.0001971199341699239,
|
80 |
-
"loss": 1.3939,
|
81 |
-
"step": 240
|
82 |
-
},
|
83 |
-
{
|
84 |
-
"epoch": 0.03,
|
85 |
-
"learning_rate": 0.00019670849619419874,
|
86 |
-
"loss": 1.3883,
|
87 |
-
"step": 260
|
88 |
-
},
|
89 |
-
{
|
90 |
-
"epoch": 0.03,
|
91 |
-
"learning_rate": 0.00019629705821847356,
|
92 |
-
"loss": 1.3257,
|
93 |
-
"step": 280
|
94 |
-
},
|
95 |
-
{
|
96 |
-
"epoch": 0.03,
|
97 |
-
"learning_rate": 0.0001958856202427484,
|
98 |
-
"loss": 1.386,
|
99 |
-
"step": 300
|
100 |
-
},
|
101 |
-
{
|
102 |
-
"epoch": 0.03,
|
103 |
-
"learning_rate": 0.00019547418226702326,
|
104 |
-
"loss": 1.3746,
|
105 |
-
"step": 320
|
106 |
-
},
|
107 |
-
{
|
108 |
-
"epoch": 0.03,
|
109 |
-
"learning_rate": 0.0001950627442912981,
|
110 |
-
"loss": 1.3266,
|
111 |
-
"step": 340
|
112 |
-
},
|
113 |
-
{
|
114 |
-
"epoch": 0.04,
|
115 |
-
"learning_rate": 0.00019465130631557293,
|
116 |
-
"loss": 1.3591,
|
117 |
-
"step": 360
|
118 |
-
},
|
119 |
-
{
|
120 |
-
"epoch": 0.04,
|
121 |
-
"learning_rate": 0.00019423986833984777,
|
122 |
-
"loss": 1.3464,
|
123 |
-
"step": 380
|
124 |
-
},
|
125 |
-
{
|
126 |
-
"epoch": 0.04,
|
127 |
-
"learning_rate": 0.00019382843036412262,
|
128 |
-
"loss": 1.3573,
|
129 |
-
"step": 400
|
130 |
-
},
|
131 |
-
{
|
132 |
-
"epoch": 0.04,
|
133 |
-
"learning_rate": 0.00019341699238839744,
|
134 |
-
"loss": 1.3455,
|
135 |
-
"step": 420
|
136 |
-
},
|
137 |
-
{
|
138 |
-
"epoch": 0.04,
|
139 |
-
"learning_rate": 0.0001930055544126723,
|
140 |
-
"loss": 1.3223,
|
141 |
-
"step": 440
|
142 |
-
},
|
143 |
-
{
|
144 |
-
"epoch": 0.05,
|
145 |
-
"learning_rate": 0.00019259411643694714,
|
146 |
-
"loss": 1.3293,
|
147 |
-
"step": 460
|
148 |
-
},
|
149 |
-
{
|
150 |
-
"epoch": 0.05,
|
151 |
-
"learning_rate": 0.000192182678461222,
|
152 |
-
"loss": 1.3033,
|
153 |
-
"step": 480
|
154 |
-
},
|
155 |
-
{
|
156 |
-
"epoch": 0.05,
|
157 |
-
"learning_rate": 0.0001917712404854968,
|
158 |
-
"loss": 1.3323,
|
159 |
-
"step": 500
|
160 |
-
},
|
161 |
-
{
|
162 |
-
"epoch": 0.05,
|
163 |
-
"learning_rate": 0.00019135980250977166,
|
164 |
-
"loss": 1.2838,
|
165 |
-
"step": 520
|
166 |
-
},
|
167 |
-
{
|
168 |
-
"epoch": 0.05,
|
169 |
-
"learning_rate": 0.0001909483645340465,
|
170 |
-
"loss": 1.332,
|
171 |
-
"step": 540
|
172 |
-
},
|
173 |
-
{
|
174 |
-
"epoch": 0.06,
|
175 |
-
"learning_rate": 0.00019053692655832133,
|
176 |
-
"loss": 1.3118,
|
177 |
-
"step": 560
|
178 |
-
},
|
179 |
-
{
|
180 |
-
"epoch": 0.06,
|
181 |
-
"learning_rate": 0.00019012548858259617,
|
182 |
-
"loss": 1.3237,
|
183 |
-
"step": 580
|
184 |
-
},
|
185 |
-
{
|
186 |
-
"epoch": 0.06,
|
187 |
-
"learning_rate": 0.00018971405060687102,
|
188 |
-
"loss": 1.2541,
|
189 |
-
"step": 600
|
190 |
-
},
|
191 |
-
{
|
192 |
-
"epoch": 0.06,
|
193 |
-
"learning_rate": 0.00018930261263114587,
|
194 |
-
"loss": 1.28,
|
195 |
-
"step": 620
|
196 |
-
},
|
197 |
-
{
|
198 |
-
"epoch": 0.07,
|
199 |
-
"learning_rate": 0.0001888911746554207,
|
200 |
-
"loss": 1.231,
|
201 |
-
"step": 640
|
202 |
-
},
|
203 |
-
{
|
204 |
-
"epoch": 0.07,
|
205 |
-
"learning_rate": 0.00018847973667969554,
|
206 |
-
"loss": 1.2676,
|
207 |
-
"step": 660
|
208 |
-
},
|
209 |
-
{
|
210 |
-
"epoch": 0.07,
|
211 |
-
"learning_rate": 0.0001880682987039704,
|
212 |
-
"loss": 1.2909,
|
213 |
-
"step": 680
|
214 |
-
},
|
215 |
-
{
|
216 |
-
"epoch": 0.07,
|
217 |
-
"learning_rate": 0.0001876568607282452,
|
218 |
-
"loss": 1.2499,
|
219 |
-
"step": 700
|
220 |
-
},
|
221 |
-
{
|
222 |
-
"epoch": 0.07,
|
223 |
-
"learning_rate": 0.00018724542275252006,
|
224 |
-
"loss": 1.2679,
|
225 |
-
"step": 720
|
226 |
-
},
|
227 |
-
{
|
228 |
-
"epoch": 0.08,
|
229 |
-
"learning_rate": 0.0001868339847767949,
|
230 |
-
"loss": 1.2674,
|
231 |
-
"step": 740
|
232 |
-
},
|
233 |
-
{
|
234 |
-
"epoch": 0.08,
|
235 |
-
"learning_rate": 0.00018642254680106975,
|
236 |
-
"loss": 1.2736,
|
237 |
-
"step": 760
|
238 |
-
},
|
239 |
-
{
|
240 |
-
"epoch": 0.08,
|
241 |
-
"learning_rate": 0.00018601110882534457,
|
242 |
-
"loss": 1.2843,
|
243 |
-
"step": 780
|
244 |
-
},
|
245 |
-
{
|
246 |
-
"epoch": 0.08,
|
247 |
-
"learning_rate": 0.00018559967084961942,
|
248 |
-
"loss": 1.281,
|
249 |
-
"step": 800
|
250 |
-
},
|
251 |
-
{
|
252 |
-
"epoch": 0.08,
|
253 |
-
"learning_rate": 0.00018518823287389427,
|
254 |
-
"loss": 1.3699,
|
255 |
-
"step": 820
|
256 |
-
},
|
257 |
-
{
|
258 |
-
"epoch": 0.09,
|
259 |
-
"learning_rate": 0.0001847767948981691,
|
260 |
-
"loss": 1.2705,
|
261 |
-
"step": 840
|
262 |
-
},
|
263 |
-
{
|
264 |
-
"epoch": 0.09,
|
265 |
-
"learning_rate": 0.00018436535692244394,
|
266 |
-
"loss": 1.2279,
|
267 |
-
"step": 860
|
268 |
-
},
|
269 |
-
{
|
270 |
-
"epoch": 0.09,
|
271 |
-
"learning_rate": 0.0001839539189467188,
|
272 |
-
"loss": 1.2779,
|
273 |
-
"step": 880
|
274 |
-
},
|
275 |
-
{
|
276 |
-
"epoch": 0.09,
|
277 |
-
"learning_rate": 0.00018354248097099364,
|
278 |
-
"loss": 1.2086,
|
279 |
-
"step": 900
|
280 |
-
},
|
281 |
-
{
|
282 |
-
"epoch": 0.09,
|
283 |
-
"learning_rate": 0.00018313104299526846,
|
284 |
-
"loss": 1.2999,
|
285 |
-
"step": 920
|
286 |
-
},
|
287 |
-
{
|
288 |
-
"epoch": 0.1,
|
289 |
-
"learning_rate": 0.0001827196050195433,
|
290 |
-
"loss": 1.2503,
|
291 |
-
"step": 940
|
292 |
-
},
|
293 |
-
{
|
294 |
-
"epoch": 0.1,
|
295 |
-
"learning_rate": 0.00018230816704381815,
|
296 |
-
"loss": 1.2466,
|
297 |
-
"step": 960
|
298 |
-
},
|
299 |
-
{
|
300 |
-
"epoch": 0.1,
|
301 |
-
"learning_rate": 0.00018189672906809297,
|
302 |
-
"loss": 1.2113,
|
303 |
-
"step": 980
|
304 |
-
},
|
305 |
-
{
|
306 |
-
"epoch": 0.1,
|
307 |
-
"learning_rate": 0.00018148529109236782,
|
308 |
-
"loss": 1.2356,
|
309 |
-
"step": 1000
|
310 |
-
},
|
311 |
-
{
|
312 |
-
"epoch": 0.1,
|
313 |
-
"learning_rate": 0.00018107385311664267,
|
314 |
-
"loss": 1.2631,
|
315 |
-
"step": 1020
|
316 |
-
},
|
317 |
-
{
|
318 |
-
"epoch": 0.11,
|
319 |
-
"learning_rate": 0.00018066241514091752,
|
320 |
-
"loss": 1.2443,
|
321 |
-
"step": 1040
|
322 |
-
},
|
323 |
-
{
|
324 |
-
"epoch": 0.11,
|
325 |
-
"learning_rate": 0.00018025097716519234,
|
326 |
-
"loss": 1.2406,
|
327 |
-
"step": 1060
|
328 |
-
},
|
329 |
-
{
|
330 |
-
"epoch": 0.11,
|
331 |
-
"learning_rate": 0.0001798395391894672,
|
332 |
-
"loss": 1.2308,
|
333 |
-
"step": 1080
|
334 |
-
},
|
335 |
-
{
|
336 |
-
"epoch": 0.11,
|
337 |
-
"learning_rate": 0.00017942810121374204,
|
338 |
-
"loss": 1.2649,
|
339 |
-
"step": 1100
|
340 |
-
},
|
341 |
-
{
|
342 |
-
"epoch": 0.11,
|
343 |
-
"learning_rate": 0.00017901666323801686,
|
344 |
-
"loss": 1.2263,
|
345 |
-
"step": 1120
|
346 |
-
},
|
347 |
-
{
|
348 |
-
"epoch": 0.12,
|
349 |
-
"learning_rate": 0.0001786052252622917,
|
350 |
-
"loss": 1.2869,
|
351 |
-
"step": 1140
|
352 |
-
},
|
353 |
-
{
|
354 |
-
"epoch": 0.12,
|
355 |
-
"learning_rate": 0.00017819378728656655,
|
356 |
-
"loss": 1.2255,
|
357 |
-
"step": 1160
|
358 |
-
},
|
359 |
-
{
|
360 |
-
"epoch": 0.12,
|
361 |
-
"learning_rate": 0.0001777823493108414,
|
362 |
-
"loss": 1.2596,
|
363 |
-
"step": 1180
|
364 |
-
},
|
365 |
-
{
|
366 |
-
"epoch": 0.12,
|
367 |
-
"learning_rate": 0.00017737091133511622,
|
368 |
-
"loss": 1.2748,
|
369 |
-
"step": 1200
|
370 |
-
},
|
371 |
-
{
|
372 |
-
"epoch": 0.12,
|
373 |
-
"learning_rate": 0.00017695947335939107,
|
374 |
-
"loss": 1.2587,
|
375 |
-
"step": 1220
|
376 |
-
},
|
377 |
-
{
|
378 |
-
"epoch": 0.13,
|
379 |
-
"learning_rate": 0.00017654803538366592,
|
380 |
-
"loss": 1.2651,
|
381 |
-
"step": 1240
|
382 |
-
},
|
383 |
-
{
|
384 |
-
"epoch": 0.13,
|
385 |
-
"learning_rate": 0.00017613659740794074,
|
386 |
-
"loss": 1.2659,
|
387 |
-
"step": 1260
|
388 |
-
},
|
389 |
-
{
|
390 |
-
"epoch": 0.13,
|
391 |
-
"learning_rate": 0.0001757251594322156,
|
392 |
-
"loss": 1.2077,
|
393 |
-
"step": 1280
|
394 |
-
},
|
395 |
-
{
|
396 |
-
"epoch": 0.13,
|
397 |
-
"learning_rate": 0.00017531372145649044,
|
398 |
-
"loss": 1.25,
|
399 |
-
"step": 1300
|
400 |
-
},
|
401 |
-
{
|
402 |
-
"epoch": 0.13,
|
403 |
-
"learning_rate": 0.00017490228348076528,
|
404 |
-
"loss": 1.2149,
|
405 |
-
"step": 1320
|
406 |
-
},
|
407 |
-
{
|
408 |
-
"epoch": 0.14,
|
409 |
-
"learning_rate": 0.0001744908455050401,
|
410 |
-
"loss": 1.2417,
|
411 |
-
"step": 1340
|
412 |
-
},
|
413 |
-
{
|
414 |
-
"epoch": 0.14,
|
415 |
-
"learning_rate": 0.00017407940752931498,
|
416 |
-
"loss": 1.1939,
|
417 |
-
"step": 1360
|
418 |
-
},
|
419 |
-
{
|
420 |
-
"epoch": 0.14,
|
421 |
-
"learning_rate": 0.00017366796955358983,
|
422 |
-
"loss": 1.2688,
|
423 |
-
"step": 1380
|
424 |
-
},
|
425 |
-
{
|
426 |
-
"epoch": 0.14,
|
427 |
-
"learning_rate": 0.00017325653157786465,
|
428 |
-
"loss": 1.2287,
|
429 |
-
"step": 1400
|
430 |
-
},
|
431 |
-
{
|
432 |
-
"epoch": 0.14,
|
433 |
-
"learning_rate": 0.0001728450936021395,
|
434 |
-
"loss": 1.2931,
|
435 |
-
"step": 1420
|
436 |
-
},
|
437 |
-
{
|
438 |
-
"epoch": 0.15,
|
439 |
-
"learning_rate": 0.00017243365562641435,
|
440 |
-
"loss": 1.2695,
|
441 |
-
"step": 1440
|
442 |
-
},
|
443 |
-
{
|
444 |
-
"epoch": 0.15,
|
445 |
-
"learning_rate": 0.00017202221765068917,
|
446 |
-
"loss": 1.2228,
|
447 |
-
"step": 1460
|
448 |
-
},
|
449 |
-
{
|
450 |
-
"epoch": 0.15,
|
451 |
-
"learning_rate": 0.00017161077967496401,
|
452 |
-
"loss": 1.2419,
|
453 |
-
"step": 1480
|
454 |
-
},
|
455 |
-
{
|
456 |
-
"epoch": 0.15,
|
457 |
-
"learning_rate": 0.00017119934169923886,
|
458 |
-
"loss": 1.2483,
|
459 |
-
"step": 1500
|
460 |
-
},
|
461 |
-
{
|
462 |
-
"epoch": 0.15,
|
463 |
-
"learning_rate": 0.0001707879037235137,
|
464 |
-
"loss": 1.2144,
|
465 |
-
"step": 1520
|
466 |
-
},
|
467 |
-
{
|
468 |
-
"epoch": 0.16,
|
469 |
-
"learning_rate": 0.00017037646574778853,
|
470 |
-
"loss": 1.2148,
|
471 |
-
"step": 1540
|
472 |
-
},
|
473 |
-
{
|
474 |
-
"epoch": 0.16,
|
475 |
-
"learning_rate": 0.00016996502777206338,
|
476 |
-
"loss": 1.2196,
|
477 |
-
"step": 1560
|
478 |
-
},
|
479 |
-
{
|
480 |
-
"epoch": 0.16,
|
481 |
-
"learning_rate": 0.00016955358979633823,
|
482 |
-
"loss": 1.2581,
|
483 |
-
"step": 1580
|
484 |
-
},
|
485 |
-
{
|
486 |
-
"epoch": 0.16,
|
487 |
-
"learning_rate": 0.00016914215182061305,
|
488 |
-
"loss": 1.2414,
|
489 |
-
"step": 1600
|
490 |
-
},
|
491 |
-
{
|
492 |
-
"epoch": 0.16,
|
493 |
-
"learning_rate": 0.0001687307138448879,
|
494 |
-
"loss": 1.2357,
|
495 |
-
"step": 1620
|
496 |
-
},
|
497 |
-
{
|
498 |
-
"epoch": 0.17,
|
499 |
-
"learning_rate": 0.00016831927586916274,
|
500 |
-
"loss": 1.22,
|
501 |
-
"step": 1640
|
502 |
-
},
|
503 |
-
{
|
504 |
-
"epoch": 0.17,
|
505 |
-
"learning_rate": 0.0001679078378934376,
|
506 |
-
"loss": 1.2234,
|
507 |
-
"step": 1660
|
508 |
-
},
|
509 |
-
{
|
510 |
-
"epoch": 0.17,
|
511 |
-
"learning_rate": 0.00016749639991771241,
|
512 |
-
"loss": 1.2291,
|
513 |
-
"step": 1680
|
514 |
-
},
|
515 |
-
{
|
516 |
-
"epoch": 0.17,
|
517 |
-
"learning_rate": 0.00016708496194198726,
|
518 |
-
"loss": 1.2344,
|
519 |
-
"step": 1700
|
520 |
-
},
|
521 |
-
{
|
522 |
-
"epoch": 0.18,
|
523 |
-
"learning_rate": 0.0001666735239662621,
|
524 |
-
"loss": 1.1987,
|
525 |
-
"step": 1720
|
526 |
-
},
|
527 |
-
{
|
528 |
-
"epoch": 0.18,
|
529 |
-
"learning_rate": 0.00016626208599053693,
|
530 |
-
"loss": 1.2232,
|
531 |
-
"step": 1740
|
532 |
-
},
|
533 |
-
{
|
534 |
-
"epoch": 0.18,
|
535 |
-
"learning_rate": 0.00016585064801481178,
|
536 |
-
"loss": 1.2402,
|
537 |
-
"step": 1760
|
538 |
-
},
|
539 |
-
{
|
540 |
-
"epoch": 0.18,
|
541 |
-
"learning_rate": 0.00016543921003908663,
|
542 |
-
"loss": 1.1475,
|
543 |
-
"step": 1780
|
544 |
-
},
|
545 |
-
{
|
546 |
-
"epoch": 0.18,
|
547 |
-
"learning_rate": 0.00016502777206336148,
|
548 |
-
"loss": 1.272,
|
549 |
-
"step": 1800
|
550 |
-
},
|
551 |
-
{
|
552 |
-
"epoch": 0.19,
|
553 |
-
"learning_rate": 0.0001646163340876363,
|
554 |
-
"loss": 1.2369,
|
555 |
-
"step": 1820
|
556 |
-
},
|
557 |
-
{
|
558 |
-
"epoch": 0.19,
|
559 |
-
"learning_rate": 0.00016420489611191114,
|
560 |
-
"loss": 1.212,
|
561 |
-
"step": 1840
|
562 |
-
},
|
563 |
-
{
|
564 |
-
"epoch": 0.19,
|
565 |
-
"learning_rate": 0.000163793458136186,
|
566 |
-
"loss": 1.2191,
|
567 |
-
"step": 1860
|
568 |
-
},
|
569 |
-
{
|
570 |
-
"epoch": 0.19,
|
571 |
-
"learning_rate": 0.00016338202016046081,
|
572 |
-
"loss": 1.1953,
|
573 |
-
"step": 1880
|
574 |
-
},
|
575 |
-
{
|
576 |
-
"epoch": 0.19,
|
577 |
-
"learning_rate": 0.00016297058218473566,
|
578 |
-
"loss": 1.2126,
|
579 |
-
"step": 1900
|
580 |
-
},
|
581 |
-
{
|
582 |
-
"epoch": 0.2,
|
583 |
-
"learning_rate": 0.0001625591442090105,
|
584 |
-
"loss": 1.2396,
|
585 |
-
"step": 1920
|
586 |
-
},
|
587 |
-
{
|
588 |
-
"epoch": 0.2,
|
589 |
-
"learning_rate": 0.00016214770623328536,
|
590 |
-
"loss": 1.1468,
|
591 |
-
"step": 1940
|
592 |
-
},
|
593 |
-
{
|
594 |
-
"epoch": 0.2,
|
595 |
-
"learning_rate": 0.00016173626825756018,
|
596 |
-
"loss": 1.1847,
|
597 |
-
"step": 1960
|
598 |
-
},
|
599 |
-
{
|
600 |
-
"epoch": 0.2,
|
601 |
-
"learning_rate": 0.00016132483028183503,
|
602 |
-
"loss": 1.2214,
|
603 |
-
"step": 1980
|
604 |
-
},
|
605 |
-
{
|
606 |
-
"epoch": 0.2,
|
607 |
-
"learning_rate": 0.00016091339230610988,
|
608 |
-
"loss": 1.2138,
|
609 |
-
"step": 2000
|
610 |
-
},
|
611 |
-
{
|
612 |
-
"epoch": 0.21,
|
613 |
-
"learning_rate": 0.0001605019543303847,
|
614 |
-
"loss": 1.2158,
|
615 |
-
"step": 2020
|
616 |
-
},
|
617 |
-
{
|
618 |
-
"epoch": 0.21,
|
619 |
-
"learning_rate": 0.00016009051635465954,
|
620 |
-
"loss": 1.2065,
|
621 |
-
"step": 2040
|
622 |
-
},
|
623 |
-
{
|
624 |
-
"epoch": 0.21,
|
625 |
-
"learning_rate": 0.0001596790783789344,
|
626 |
-
"loss": 1.2048,
|
627 |
-
"step": 2060
|
628 |
-
},
|
629 |
-
{
|
630 |
-
"epoch": 0.21,
|
631 |
-
"learning_rate": 0.00015926764040320924,
|
632 |
-
"loss": 1.1904,
|
633 |
-
"step": 2080
|
634 |
-
},
|
635 |
-
{
|
636 |
-
"epoch": 0.21,
|
637 |
-
"learning_rate": 0.00015885620242748406,
|
638 |
-
"loss": 1.1872,
|
639 |
-
"step": 2100
|
640 |
-
},
|
641 |
-
{
|
642 |
-
"epoch": 0.22,
|
643 |
-
"learning_rate": 0.0001584447644517589,
|
644 |
-
"loss": 1.2087,
|
645 |
-
"step": 2120
|
646 |
-
},
|
647 |
-
{
|
648 |
-
"epoch": 0.22,
|
649 |
-
"learning_rate": 0.00015803332647603376,
|
650 |
-
"loss": 1.1909,
|
651 |
-
"step": 2140
|
652 |
-
},
|
653 |
-
{
|
654 |
-
"epoch": 0.22,
|
655 |
-
"learning_rate": 0.00015762188850030858,
|
656 |
-
"loss": 1.2609,
|
657 |
-
"step": 2160
|
658 |
-
},
|
659 |
-
{
|
660 |
-
"epoch": 0.22,
|
661 |
-
"learning_rate": 0.00015721045052458343,
|
662 |
-
"loss": 1.2083,
|
663 |
-
"step": 2180
|
664 |
-
},
|
665 |
-
{
|
666 |
-
"epoch": 0.22,
|
667 |
-
"learning_rate": 0.00015679901254885827,
|
668 |
-
"loss": 1.2205,
|
669 |
-
"step": 2200
|
670 |
-
},
|
671 |
-
{
|
672 |
-
"epoch": 0.23,
|
673 |
-
"learning_rate": 0.00015638757457313312,
|
674 |
-
"loss": 1.1966,
|
675 |
-
"step": 2220
|
676 |
-
},
|
677 |
-
{
|
678 |
-
"epoch": 0.23,
|
679 |
-
"learning_rate": 0.00015597613659740794,
|
680 |
-
"loss": 1.235,
|
681 |
-
"step": 2240
|
682 |
-
},
|
683 |
-
{
|
684 |
-
"epoch": 0.23,
|
685 |
-
"learning_rate": 0.0001555646986216828,
|
686 |
-
"loss": 1.2488,
|
687 |
-
"step": 2260
|
688 |
-
},
|
689 |
-
{
|
690 |
-
"epoch": 0.23,
|
691 |
-
"learning_rate": 0.00015515326064595764,
|
692 |
-
"loss": 1.2229,
|
693 |
-
"step": 2280
|
694 |
-
},
|
695 |
-
{
|
696 |
-
"epoch": 0.23,
|
697 |
-
"learning_rate": 0.00015474182267023246,
|
698 |
-
"loss": 1.1874,
|
699 |
-
"step": 2300
|
700 |
-
},
|
701 |
-
{
|
702 |
-
"epoch": 0.24,
|
703 |
-
"learning_rate": 0.0001543303846945073,
|
704 |
-
"loss": 1.1724,
|
705 |
-
"step": 2320
|
706 |
-
},
|
707 |
-
{
|
708 |
-
"epoch": 0.24,
|
709 |
-
"learning_rate": 0.00015391894671878216,
|
710 |
-
"loss": 1.1786,
|
711 |
-
"step": 2340
|
712 |
-
},
|
713 |
-
{
|
714 |
-
"epoch": 0.24,
|
715 |
-
"learning_rate": 0.000153507508743057,
|
716 |
-
"loss": 1.1913,
|
717 |
-
"step": 2360
|
718 |
-
},
|
719 |
-
{
|
720 |
-
"epoch": 0.24,
|
721 |
-
"learning_rate": 0.00015309607076733183,
|
722 |
-
"loss": 1.2138,
|
723 |
-
"step": 2380
|
724 |
-
},
|
725 |
-
{
|
726 |
-
"epoch": 0.24,
|
727 |
-
"learning_rate": 0.00015268463279160667,
|
728 |
-
"loss": 1.1954,
|
729 |
-
"step": 2400
|
730 |
-
},
|
731 |
-
{
|
732 |
-
"epoch": 0.25,
|
733 |
-
"learning_rate": 0.00015227319481588152,
|
734 |
-
"loss": 1.1926,
|
735 |
-
"step": 2420
|
736 |
-
},
|
737 |
-
{
|
738 |
-
"epoch": 0.25,
|
739 |
-
"learning_rate": 0.00015186175684015634,
|
740 |
-
"loss": 1.1893,
|
741 |
-
"step": 2440
|
742 |
-
},
|
743 |
-
{
|
744 |
-
"epoch": 0.25,
|
745 |
-
"learning_rate": 0.0001514503188644312,
|
746 |
-
"loss": 1.2152,
|
747 |
-
"step": 2460
|
748 |
-
},
|
749 |
-
{
|
750 |
-
"epoch": 0.25,
|
751 |
-
"learning_rate": 0.00015103888088870604,
|
752 |
-
"loss": 1.199,
|
753 |
-
"step": 2480
|
754 |
-
},
|
755 |
-
{
|
756 |
-
"epoch": 0.25,
|
757 |
-
"learning_rate": 0.0001506274429129809,
|
758 |
-
"loss": 1.1966,
|
759 |
-
"step": 2500
|
760 |
-
}
|
761 |
-
],
|
762 |
-
"logging_steps": 20,
|
763 |
-
"max_steps": 9822,
|
764 |
-
"num_train_epochs": 1,
|
765 |
-
"save_steps": 500,
|
766 |
-
"total_flos": 3.630490997971968e+16,
|
767 |
-
"trial_name": null,
|
768 |
-
"trial_params": null
|
769 |
-
}
|
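Each deleted trainer_state.json carries the full log_history up to its checkpoint (for checkpoint-2500, 125 entries covering steps 20 through 2500 at epoch ≈ 0.25). A minimal sketch for inspecting such a file once downloaded, using the same relative path as in this repository:

```python
import json

# Print the logged loss curve from a downloaded checkpoint's trainer_state.json.
with open("trainer_outputs/checkpoint-2500/trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    if "loss" in entry:  # training log entries; eval entries (if any) would lack "loss"
        print(f'step {entry["step"]:>5}  loss {entry["loss"]:.4f}  lr {entry["learning_rate"]:.2e}')
```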
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-2500/training_args.bin
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
size 4027
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/README.md
DELETED
@@ -1,21 +0,0 @@
---
library_name: peft
---
## Training procedure


The following `bitsandbytes` quantization config was used during training:
- quant_method: QuantizationMethod.BITS_AND_BYTES
- load_in_8bit: False
- load_in_4bit: True
- llm_int8_threshold: 6.0
- llm_int8_skip_modules: None
- llm_int8_enable_fp32_cpu_offload: False
- llm_int8_has_fp16_weight: False
- bnb_4bit_quant_type: nf4
- bnb_4bit_use_double_quant: True
- bnb_4bit_compute_dtype: bfloat16
### Framework versions


- PEFT 0.4.0.dev0
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
{
  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "revision": null,
  "target_modules": [
    "q_proj",
    "k_proj",
    "v_proj"
  ],
  "task_type": "CAUSAL_LM"
}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f1cdf1a21aa36468ca7324817c18dbea0d9c993caad72356c80f67ad27c5adec
size 39407821
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:bc724ccb44ba7121ad649ed5a9f6f1072058b00e8e1ed4b66ffeff3b1cf5b498
size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0bfdf34f826fc1a3a954cb62126716372e4d6d4979b90192aa8b2d7bd0eb6172
size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ca081c200d1f1cff74b76fe2637a142c241d77bc5919ab25bcff1d4293ebf505
size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/trainer_state.json
DELETED
@@ -1,919 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"best_metric": null,
|
3 |
-
"best_model_checkpoint": null,
|
4 |
-
"epoch": 0.3054328874861601,
|
5 |
-
"eval_steps": 500,
|
6 |
-
"global_step": 3000,
|
7 |
-
"is_hyper_param_search": false,
|
8 |
-
"is_local_process_zero": true,
|
9 |
-
"is_world_process_zero": true,
|
10 |
-
"log_history": [
|
11 |
-
{
|
12 |
-
"epoch": 0.0,
|
13 |
-
"learning_rate": 4e-05,
|
14 |
-
"loss": 3.5766,
|
15 |
-
"step": 20
|
16 |
-
},
|
17 |
-
{
|
18 |
-
"epoch": 0.0,
|
19 |
-
"learning_rate": 8e-05,
|
20 |
-
"loss": 2.9038,
|
21 |
-
"step": 40
|
22 |
-
},
|
23 |
-
{
|
24 |
-
"epoch": 0.01,
|
25 |
-
"learning_rate": 0.00012,
|
26 |
-
"loss": 1.9072,
|
27 |
-
"step": 60
|
28 |
-
},
|
29 |
-
{
|
30 |
-
"epoch": 0.01,
|
31 |
-
"learning_rate": 0.00016,
|
32 |
-
"loss": 1.7359,
|
33 |
-
"step": 80
|
34 |
-
},
|
35 |
-
{
|
36 |
-
"epoch": 0.01,
|
37 |
-
"learning_rate": 0.0002,
|
38 |
-
"loss": 1.5349,
|
39 |
-
"step": 100
|
40 |
-
},
|
41 |
-
{
|
42 |
-
"epoch": 0.01,
|
43 |
-
"learning_rate": 0.00019958856202427486,
|
44 |
-
"loss": 1.4908,
|
45 |
-
"step": 120
|
46 |
-
},
|
47 |
-
{
|
48 |
-
"epoch": 0.01,
|
49 |
-
"learning_rate": 0.00019917712404854968,
|
50 |
-
"loss": 1.4931,
|
51 |
-
"step": 140
|
52 |
-
},
|
53 |
-
{
|
54 |
-
"epoch": 0.02,
|
55 |
-
"learning_rate": 0.00019876568607282453,
|
56 |
-
"loss": 1.4201,
|
57 |
-
"step": 160
|
58 |
-
},
|
59 |
-
{
|
60 |
-
"epoch": 0.02,
|
61 |
-
"learning_rate": 0.00019835424809709937,
|
62 |
-
"loss": 1.4541,
|
63 |
-
"step": 180
|
64 |
-
},
|
65 |
-
{
|
66 |
-
"epoch": 0.02,
|
67 |
-
"learning_rate": 0.00019794281012137422,
|
68 |
-
"loss": 1.4102,
|
69 |
-
"step": 200
|
70 |
-
},
|
71 |
-
{
|
72 |
-
"epoch": 0.02,
|
73 |
-
"learning_rate": 0.00019753137214564904,
|
74 |
-
"loss": 1.3861,
|
75 |
-
"step": 220
|
76 |
-
},
|
77 |
-
{
|
78 |
-
"epoch": 0.02,
|
79 |
-
"learning_rate": 0.0001971199341699239,
|
80 |
-
"loss": 1.3939,
|
81 |
-
"step": 240
|
82 |
-
},
|
83 |
-
{
|
84 |
-
"epoch": 0.03,
|
85 |
-
"learning_rate": 0.00019670849619419874,
|
86 |
-
"loss": 1.3883,
|
87 |
-
"step": 260
|
88 |
-
},
|
89 |
-
{
|
90 |
-
"epoch": 0.03,
|
91 |
-
"learning_rate": 0.00019629705821847356,
|
92 |
-
"loss": 1.3257,
|
93 |
-
"step": 280
|
94 |
-
},
|
95 |
-
{
|
96 |
-
"epoch": 0.03,
|
97 |
-
"learning_rate": 0.0001958856202427484,
|
98 |
-
"loss": 1.386,
|
99 |
-
"step": 300
|
100 |
-
},
|
101 |
-
{
|
102 |
-
"epoch": 0.03,
|
103 |
-
"learning_rate": 0.00019547418226702326,
|
104 |
-
"loss": 1.3746,
|
105 |
-
"step": 320
|
106 |
-
},
|
107 |
-
{
|
108 |
-
"epoch": 0.03,
|
109 |
-
"learning_rate": 0.0001950627442912981,
|
110 |
-
"loss": 1.3266,
|
111 |
-
"step": 340
|
112 |
-
},
|
113 |
-
{
|
114 |
-
"epoch": 0.04,
|
115 |
-
"learning_rate": 0.00019465130631557293,
|
116 |
-
"loss": 1.3591,
|
117 |
-
"step": 360
|
118 |
-
},
|
119 |
-
{
|
120 |
-
"epoch": 0.04,
|
121 |
-
"learning_rate": 0.00019423986833984777,
|
122 |
-
"loss": 1.3464,
|
123 |
-
"step": 380
|
124 |
-
},
|
125 |
-
{
|
126 |
-
"epoch": 0.04,
|
127 |
-
"learning_rate": 0.00019382843036412262,
|
128 |
-
"loss": 1.3573,
|
129 |
-
"step": 400
|
130 |
-
},
|
131 |
-
{
|
132 |
-
"epoch": 0.04,
|
133 |
-
"learning_rate": 0.00019341699238839744,
|
134 |
-
"loss": 1.3455,
|
135 |
-
"step": 420
|
136 |
-
},
|
137 |
-
{
|
138 |
-
"epoch": 0.04,
|
139 |
-
"learning_rate": 0.0001930055544126723,
|
140 |
-
"loss": 1.3223,
|
141 |
-
"step": 440
|
142 |
-
},
|
143 |
-
{
|
144 |
-
"epoch": 0.05,
|
145 |
-
"learning_rate": 0.00019259411643694714,
|
146 |
-
"loss": 1.3293,
|
147 |
-
"step": 460
|
148 |
-
},
|
149 |
-
{
|
150 |
-
"epoch": 0.05,
|
151 |
-
"learning_rate": 0.000192182678461222,
|
152 |
-
"loss": 1.3033,
|
153 |
-
"step": 480
|
154 |
-
},
|
155 |
-
{
|
156 |
-
"epoch": 0.05,
|
157 |
-
"learning_rate": 0.0001917712404854968,
|
158 |
-
"loss": 1.3323,
|
159 |
-
"step": 500
|
160 |
-
},
|
161 |
-
{
|
162 |
-
"epoch": 0.05,
|
163 |
-
"learning_rate": 0.00019135980250977166,
|
164 |
-
"loss": 1.2838,
|
165 |
-
"step": 520
|
166 |
-
},
|
167 |
-
{
|
168 |
-
"epoch": 0.05,
|
169 |
-
"learning_rate": 0.0001909483645340465,
|
170 |
-
"loss": 1.332,
|
171 |
-
"step": 540
|
172 |
-
},
|
173 |
-
{
|
174 |
-
"epoch": 0.06,
|
175 |
-
"learning_rate": 0.00019053692655832133,
|
176 |
-
"loss": 1.3118,
|
177 |
-
"step": 560
|
178 |
-
},
|
179 |
-
{
|
180 |
-
"epoch": 0.06,
|
181 |
-
"learning_rate": 0.00019012548858259617,
|
182 |
-
"loss": 1.3237,
|
183 |
-
"step": 580
|
184 |
-
},
|
185 |
-
{
|
186 |
-
"epoch": 0.06,
|
187 |
-
"learning_rate": 0.00018971405060687102,
|
188 |
-
"loss": 1.2541,
|
189 |
-
"step": 600
|
190 |
-
},
|
191 |
-
{
|
192 |
-
"epoch": 0.06,
|
193 |
-
"learning_rate": 0.00018930261263114587,
|
194 |
-
"loss": 1.28,
|
195 |
-
"step": 620
|
196 |
-
},
|
197 |
-
{
|
198 |
-
"epoch": 0.07,
|
199 |
-
"learning_rate": 0.0001888911746554207,
|
200 |
-
"loss": 1.231,
|
201 |
-
"step": 640
|
202 |
-
},
|
203 |
-
{
|
204 |
-
"epoch": 0.07,
|
205 |
-
"learning_rate": 0.00018847973667969554,
|
206 |
-
"loss": 1.2676,
|
207 |
-
"step": 660
|
208 |
-
},
|
209 |
-
{
|
210 |
-
"epoch": 0.07,
|
211 |
-
"learning_rate": 0.0001880682987039704,
|
212 |
-
"loss": 1.2909,
|
213 |
-
"step": 680
|
214 |
-
},
|
215 |
-
{
|
216 |
-
"epoch": 0.07,
|
217 |
-
"learning_rate": 0.0001876568607282452,
|
218 |
-
"loss": 1.2499,
|
219 |
-
"step": 700
|
220 |
-
},
|
221 |
-
{
|
222 |
-
"epoch": 0.07,
|
223 |
-
"learning_rate": 0.00018724542275252006,
|
224 |
-
"loss": 1.2679,
|
225 |
-
"step": 720
|
226 |
-
},
|
227 |
-
{
|
228 |
-
"epoch": 0.08,
|
229 |
-
"learning_rate": 0.0001868339847767949,
|
230 |
-
"loss": 1.2674,
|
231 |
-
"step": 740
|
232 |
-
},
|
233 |
-
{
|
234 |
-
"epoch": 0.08,
|
235 |
-
"learning_rate": 0.00018642254680106975,
|
236 |
-
"loss": 1.2736,
|
237 |
-
"step": 760
|
238 |
-
},
|
239 |
-
{
|
240 |
-
"epoch": 0.08,
|
241 |
-
"learning_rate": 0.00018601110882534457,
|
242 |
-
"loss": 1.2843,
|
243 |
-
"step": 780
|
244 |
-
},
|
245 |
-
{
|
246 |
-
"epoch": 0.08,
|
247 |
-
"learning_rate": 0.00018559967084961942,
|
248 |
-
"loss": 1.281,
|
249 |
-
"step": 800
|
250 |
-
},
|
251 |
-
{
|
252 |
-
"epoch": 0.08,
|
253 |
-
"learning_rate": 0.00018518823287389427,
|
254 |
-
"loss": 1.3699,
|
255 |
-
"step": 820
|
256 |
-
},
|
257 |
-
{
|
258 |
-
"epoch": 0.09,
|
259 |
-
"learning_rate": 0.0001847767948981691,
|
260 |
-
"loss": 1.2705,
|
261 |
-
"step": 840
|
262 |
-
},
|
263 |
-
{
|
264 |
-
"epoch": 0.09,
|
265 |
-
"learning_rate": 0.00018436535692244394,
|
266 |
-
"loss": 1.2279,
|
267 |
-
"step": 860
|
268 |
-
},
|
269 |
-
{
|
270 |
-
"epoch": 0.09,
|
271 |
-
"learning_rate": 0.0001839539189467188,
|
272 |
-
"loss": 1.2779,
|
273 |
-
"step": 880
|
274 |
-
},
|
275 |
-
{
|
276 |
-
"epoch": 0.09,
|
277 |
-
"learning_rate": 0.00018354248097099364,
|
278 |
-
"loss": 1.2086,
|
279 |
-
"step": 900
|
280 |
-
},
|
281 |
-
{
|
282 |
-
"epoch": 0.09,
|
283 |
-
"learning_rate": 0.00018313104299526846,
|
284 |
-
"loss": 1.2999,
|
285 |
-
"step": 920
|
286 |
-
},
|
287 |
-
{
|
288 |
-
"epoch": 0.1,
|
289 |
-
"learning_rate": 0.0001827196050195433,
|
290 |
-
"loss": 1.2503,
|
291 |
-
"step": 940
|
292 |
-
},
|
293 |
-
{
|
294 |
-
"epoch": 0.1,
|
295 |
-
"learning_rate": 0.00018230816704381815,
|
296 |
-
"loss": 1.2466,
|
297 |
-
"step": 960
|
298 |
-
},
|
299 |
-
{
|
300 |
-
"epoch": 0.1,
|
301 |
-
"learning_rate": 0.00018189672906809297,
|
302 |
-
"loss": 1.2113,
|
303 |
-
"step": 980
|
304 |
-
},
|
305 |
-
{
|
306 |
-
"epoch": 0.1,
|
307 |
-
"learning_rate": 0.00018148529109236782,
|
308 |
-
"loss": 1.2356,
|
309 |
-
"step": 1000
|
310 |
-
},
|
311 |
-
{
|
312 |
-
"epoch": 0.1,
|
313 |
-
"learning_rate": 0.00018107385311664267,
|
314 |
-
"loss": 1.2631,
|
315 |
-
"step": 1020
|
316 |
-
},
|
317 |
-
{
|
318 |
-
"epoch": 0.11,
|
319 |
-
"learning_rate": 0.00018066241514091752,
|
320 |
-
"loss": 1.2443,
|
321 |
-
"step": 1040
|
322 |
-
},
|
323 |
-
{
|
324 |
-
"epoch": 0.11,
|
325 |
-
"learning_rate": 0.00018025097716519234,
|
326 |
-
"loss": 1.2406,
|
327 |
-
"step": 1060
|
328 |
-
},
|
329 |
-
{
|
330 |
-
"epoch": 0.11,
|
331 |
-
"learning_rate": 0.0001798395391894672,
|
332 |
-
"loss": 1.2308,
|
333 |
-
"step": 1080
|
334 |
-
},
|
335 |
-
{
|
336 |
-
"epoch": 0.11,
|
337 |
-
"learning_rate": 0.00017942810121374204,
|
338 |
-
"loss": 1.2649,
|
339 |
-
"step": 1100
|
340 |
-
},
|
341 |
-
{
|
342 |
-
"epoch": 0.11,
|
343 |
-
"learning_rate": 0.00017901666323801686,
|
344 |
-
"loss": 1.2263,
|
345 |
-
"step": 1120
|
346 |
-
},
|
347 |
-
{
|
348 |
-
"epoch": 0.12,
|
349 |
-
"learning_rate": 0.0001786052252622917,
|
350 |
-
"loss": 1.2869,
|
351 |
-
"step": 1140
|
352 |
-
},
|
353 |
-
{
|
354 |
-
"epoch": 0.12,
|
355 |
-
"learning_rate": 0.00017819378728656655,
|
356 |
-
"loss": 1.2255,
|
357 |
-
"step": 1160
|
358 |
-
},
|
359 |
-
{
|
360 |
-
"epoch": 0.12,
|
361 |
-
"learning_rate": 0.0001777823493108414,
|
362 |
-
"loss": 1.2596,
|
363 |
-
"step": 1180
|
364 |
-
},
|
365 |
-
{
|
366 |
-
"epoch": 0.12,
|
367 |
-
"learning_rate": 0.00017737091133511622,
|
368 |
-
"loss": 1.2748,
|
369 |
-
"step": 1200
|
370 |
-
},
|
371 |
-
{
|
372 |
-
"epoch": 0.12,
|
373 |
-
"learning_rate": 0.00017695947335939107,
|
374 |
-
"loss": 1.2587,
|
375 |
-
"step": 1220
|
376 |
-
},
|
377 |
-
{
|
378 |
-
"epoch": 0.13,
|
379 |
-
"learning_rate": 0.00017654803538366592,
|
380 |
-
"loss": 1.2651,
|
381 |
-
"step": 1240
|
382 |
-
},
|
383 |
-
{
|
384 |
-
"epoch": 0.13,
|
385 |
-
"learning_rate": 0.00017613659740794074,
|
386 |
-
"loss": 1.2659,
|
387 |
-
"step": 1260
|
388 |
-
},
|
389 |
-
{
|
390 |
-
"epoch": 0.13,
|
391 |
-
"learning_rate": 0.0001757251594322156,
|
392 |
-
"loss": 1.2077,
|
393 |
-
"step": 1280
|
394 |
-
},
|
395 |
-
{
|
396 |
-
"epoch": 0.13,
|
397 |
-
"learning_rate": 0.00017531372145649044,
|
398 |
-
"loss": 1.25,
|
399 |
-
"step": 1300
|
400 |
-
},
|
401 |
-
{
|
402 |
-
"epoch": 0.13,
|
403 |
-
"learning_rate": 0.00017490228348076528,
|
404 |
-
"loss": 1.2149,
|
405 |
-
"step": 1320
|
406 |
-
},
|
407 |
-
{
|
408 |
-
"epoch": 0.14,
|
409 |
-
"learning_rate": 0.0001744908455050401,
|
410 |
-
"loss": 1.2417,
|
411 |
-
"step": 1340
|
412 |
-
},
|
413 |
-
{
|
414 |
-
"epoch": 0.14,
|
415 |
-
"learning_rate": 0.00017407940752931498,
|
416 |
-
"loss": 1.1939,
|
417 |
-
"step": 1360
|
418 |
-
},
|
419 |
-
{
|
420 |
-
"epoch": 0.14,
|
421 |
-
"learning_rate": 0.00017366796955358983,
|
422 |
-
"loss": 1.2688,
|
423 |
-
"step": 1380
|
424 |
-
},
|
425 |
-
{
|
426 |
-
"epoch": 0.14,
|
427 |
-
"learning_rate": 0.00017325653157786465,
|
428 |
-
"loss": 1.2287,
|
429 |
-
"step": 1400
|
430 |
-
},
|
431 |
-
{
|
432 |
-
"epoch": 0.14,
|
433 |
-
"learning_rate": 0.0001728450936021395,
|
434 |
-
"loss": 1.2931,
|
435 |
-
"step": 1420
|
436 |
-
},
|
437 |
-
{
|
438 |
-
"epoch": 0.15,
|
439 |
-
"learning_rate": 0.00017243365562641435,
|
440 |
-
"loss": 1.2695,
|
441 |
-
"step": 1440
|
442 |
-
},
|
443 |
-
{
|
444 |
-
"epoch": 0.15,
|
445 |
-
"learning_rate": 0.00017202221765068917,
|
446 |
-
"loss": 1.2228,
|
447 |
-
"step": 1460
|
448 |
-
},
|
449 |
-
{
|
450 |
-
"epoch": 0.15,
|
451 |
-
"learning_rate": 0.00017161077967496401,
|
452 |
-
"loss": 1.2419,
|
453 |
-
"step": 1480
|
454 |
-
},
|
455 |
-
{
|
456 |
-
"epoch": 0.15,
|
457 |
-
"learning_rate": 0.00017119934169923886,
|
458 |
-
"loss": 1.2483,
|
459 |
-
"step": 1500
|
460 |
-
},
|
461 |
-
{
|
462 |
-
"epoch": 0.15,
|
463 |
-
"learning_rate": 0.0001707879037235137,
|
464 |
-
"loss": 1.2144,
|
465 |
-
"step": 1520
|
466 |
-
},
|
467 |
-
{
|
468 |
-
"epoch": 0.16,
|
469 |
-
"learning_rate": 0.00017037646574778853,
|
470 |
-
"loss": 1.2148,
|
471 |
-
"step": 1540
|
472 |
-
},
|
473 |
-
{
|
474 |
-
"epoch": 0.16,
|
475 |
-
"learning_rate": 0.00016996502777206338,
|
476 |
-
"loss": 1.2196,
|
477 |
-
"step": 1560
|
478 |
-
},
|
479 |
-
{
|
480 |
-
"epoch": 0.16,
|
481 |
-
"learning_rate": 0.00016955358979633823,
|
482 |
-
"loss": 1.2581,
|
483 |
-
"step": 1580
|
484 |
-
},
|
485 |
-
{
|
486 |
-
"epoch": 0.16,
|
487 |
-
"learning_rate": 0.00016914215182061305,
|
488 |
-
"loss": 1.2414,
|
489 |
-
"step": 1600
|
490 |
-
},
|
491 |
-
{
|
492 |
-
"epoch": 0.16,
|
493 |
-
"learning_rate": 0.0001687307138448879,
|
494 |
-
"loss": 1.2357,
|
495 |
-
"step": 1620
|
496 |
-
},
|
497 |
-
{
|
498 |
-
"epoch": 0.17,
|
499 |
-
"learning_rate": 0.00016831927586916274,
|
500 |
-
"loss": 1.22,
|
501 |
-
"step": 1640
|
502 |
-
},
|
503 |
-
{
|
504 |
-
"epoch": 0.17,
|
505 |
-
"learning_rate": 0.0001679078378934376,
|
506 |
-
"loss": 1.2234,
|
507 |
-
"step": 1660
|
508 |
-
},
|
509 |
-
{
|
510 |
-
"epoch": 0.17,
|
511 |
-
"learning_rate": 0.00016749639991771241,
|
512 |
-
"loss": 1.2291,
|
513 |
-
"step": 1680
|
514 |
-
},
|
515 |
-
{
|
516 |
-
"epoch": 0.17,
|
517 |
-
"learning_rate": 0.00016708496194198726,
|
518 |
-
"loss": 1.2344,
|
519 |
-
"step": 1700
|
520 |
-
},
|
521 |
-
{
|
522 |
-
"epoch": 0.18,
|
523 |
-
"learning_rate": 0.0001666735239662621,
|
524 |
-
"loss": 1.1987,
|
525 |
-
"step": 1720
|
526 |
-
},
|
527 |
-
{
|
528 |
-
"epoch": 0.18,
|
529 |
-
"learning_rate": 0.00016626208599053693,
|
530 |
-
"loss": 1.2232,
|
531 |
-
"step": 1740
|
532 |
-
},
|
533 |
-
{
|
534 |
-
"epoch": 0.18,
|
535 |
-
"learning_rate": 0.00016585064801481178,
|
536 |
-
"loss": 1.2402,
|
537 |
-
"step": 1760
|
538 |
-
},
|
539 |
-
{
|
540 |
-
"epoch": 0.18,
|
541 |
-
"learning_rate": 0.00016543921003908663,
|
542 |
-
"loss": 1.1475,
|
543 |
-
"step": 1780
|
544 |
-
},
|
545 |
-
{
|
546 |
-
"epoch": 0.18,
|
547 |
-
"learning_rate": 0.00016502777206336148,
|
548 |
-
"loss": 1.272,
|
549 |
-
"step": 1800
|
550 |
-
},
|
551 |
-
{
|
552 |
-
"epoch": 0.19,
|
553 |
-
"learning_rate": 0.0001646163340876363,
|
554 |
-
"loss": 1.2369,
|
555 |
-
"step": 1820
|
556 |
-
},
|
557 |
-
{
|
558 |
-
"epoch": 0.19,
|
559 |
-
"learning_rate": 0.00016420489611191114,
|
560 |
-
"loss": 1.212,
|
561 |
-
"step": 1840
|
562 |
-
},
|
563 |
-
{
|
564 |
-
"epoch": 0.19,
|
565 |
-
"learning_rate": 0.000163793458136186,
|
566 |
-
"loss": 1.2191,
|
567 |
-
"step": 1860
|
568 |
-
},
|
569 |
-
{
|
570 |
-
"epoch": 0.19,
|
571 |
-
"learning_rate": 0.00016338202016046081,
|
572 |
-
"loss": 1.1953,
|
573 |
-
"step": 1880
|
574 |
-
},
|
575 |
-
{
|
576 |
-
"epoch": 0.19,
|
577 |
-
"learning_rate": 0.00016297058218473566,
|
578 |
-
"loss": 1.2126,
|
579 |
-
"step": 1900
|
580 |
-
},
|
581 |
-
{
|
582 |
-
"epoch": 0.2,
|
583 |
-
"learning_rate": 0.0001625591442090105,
|
584 |
-
"loss": 1.2396,
|
585 |
-
"step": 1920
|
586 |
-
},
|
587 |
-
{
|
588 |
-
"epoch": 0.2,
|
589 |
-
"learning_rate": 0.00016214770623328536,
|
590 |
-
"loss": 1.1468,
|
591 |
-
"step": 1940
|
592 |
-
},
|
593 |
-
{
|
594 |
-
"epoch": 0.2,
|
595 |
-
"learning_rate": 0.00016173626825756018,
|
596 |
-
"loss": 1.1847,
|
597 |
-
"step": 1960
|
598 |
-
},
|
599 |
-
{
|
600 |
-
"epoch": 0.2,
|
601 |
-
"learning_rate": 0.00016132483028183503,
|
602 |
-
"loss": 1.2214,
|
603 |
-
"step": 1980
|
604 |
-
},
|
605 |
-
{
|
606 |
-
"epoch": 0.2,
|
607 |
-
"learning_rate": 0.00016091339230610988,
|
608 |
-
"loss": 1.2138,
|
609 |
-
"step": 2000
|
610 |
-
},
|
611 |
-
{
|
612 |
-
"epoch": 0.21,
|
613 |
-
"learning_rate": 0.0001605019543303847,
|
614 |
-
"loss": 1.2158,
|
615 |
-
"step": 2020
|
616 |
-
},
|
617 |
-
{
|
618 |
-
"epoch": 0.21,
|
619 |
-
"learning_rate": 0.00016009051635465954,
|
620 |
-
"loss": 1.2065,
|
621 |
-
"step": 2040
|
622 |
-
},
|
623 |
-
{
|
624 |
-
"epoch": 0.21,
|
625 |
-
"learning_rate": 0.0001596790783789344,
|
626 |
-
"loss": 1.2048,
|
627 |
-
"step": 2060
|
628 |
-
},
|
629 |
-
{
|
630 |
-
"epoch": 0.21,
|
631 |
-
"learning_rate": 0.00015926764040320924,
|
632 |
-
"loss": 1.1904,
|
633 |
-
"step": 2080
|
634 |
-
},
|
635 |
-
{
|
636 |
-
"epoch": 0.21,
|
637 |
-
"learning_rate": 0.00015885620242748406,
|
638 |
-
"loss": 1.1872,
|
639 |
-
"step": 2100
|
640 |
-
},
|
641 |
-
{
|
642 |
-
"epoch": 0.22,
|
643 |
-
"learning_rate": 0.0001584447644517589,
|
644 |
-
"loss": 1.2087,
|
645 |
-
"step": 2120
|
646 |
-
},
|
647 |
-
{
|
648 |
-
"epoch": 0.22,
|
649 |
-
"learning_rate": 0.00015803332647603376,
|
650 |
-
"loss": 1.1909,
|
651 |
-
"step": 2140
|
652 |
-
},
|
653 |
-
{
|
654 |
-
"epoch": 0.22,
|
655 |
-
"learning_rate": 0.00015762188850030858,
|
656 |
-
"loss": 1.2609,
|
657 |
-
"step": 2160
|
658 |
-
},
|
659 |
-
{
|
660 |
-
"epoch": 0.22,
|
661 |
-
"learning_rate": 0.00015721045052458343,
|
662 |
-
"loss": 1.2083,
|
663 |
-
"step": 2180
|
664 |
-
},
|
665 |
-
{
|
666 |
-
"epoch": 0.22,
|
667 |
-
"learning_rate": 0.00015679901254885827,
|
668 |
-
"loss": 1.2205,
|
669 |
-
"step": 2200
|
670 |
-
},
|
671 |
-
{
|
672 |
-
"epoch": 0.23,
|
673 |
-
"learning_rate": 0.00015638757457313312,
|
674 |
-
"loss": 1.1966,
|
675 |
-
"step": 2220
|
676 |
-
},
|
677 |
-
{
|
678 |
-
"epoch": 0.23,
|
679 |
-
"learning_rate": 0.00015597613659740794,
|
680 |
-
"loss": 1.235,
|
681 |
-
"step": 2240
|
682 |
-
},
|
683 |
-
{
|
684 |
-
"epoch": 0.23,
|
685 |
-
"learning_rate": 0.0001555646986216828,
|
686 |
-
"loss": 1.2488,
|
687 |
-
"step": 2260
|
688 |
-
},
|
689 |
-
{
|
690 |
-
"epoch": 0.23,
|
691 |
-
"learning_rate": 0.00015515326064595764,
|
692 |
-
"loss": 1.2229,
|
693 |
-
"step": 2280
|
694 |
-
},
|
695 |
-
{
|
696 |
-
"epoch": 0.23,
|
697 |
-
"learning_rate": 0.00015474182267023246,
|
698 |
-
"loss": 1.1874,
|
699 |
-
"step": 2300
|
700 |
-
},
|
701 |
-
{
|
702 |
-
"epoch": 0.24,
|
703 |
-
"learning_rate": 0.0001543303846945073,
|
704 |
-
"loss": 1.1724,
|
705 |
-
"step": 2320
|
706 |
-
},
|
707 |
-
{
|
708 |
-
"epoch": 0.24,
|
709 |
-
"learning_rate": 0.00015391894671878216,
|
710 |
-
"loss": 1.1786,
|
711 |
-
"step": 2340
|
712 |
-
},
|
713 |
-
{
|
714 |
-
"epoch": 0.24,
|
715 |
-
"learning_rate": 0.000153507508743057,
|
716 |
-
"loss": 1.1913,
|
717 |
-
"step": 2360
|
718 |
-
},
|
719 |
-
{
|
720 |
-
"epoch": 0.24,
|
721 |
-
"learning_rate": 0.00015309607076733183,
|
722 |
-
"loss": 1.2138,
|
723 |
-
"step": 2380
|
724 |
-
},
|
725 |
-
{
|
726 |
-
"epoch": 0.24,
|
727 |
-
"learning_rate": 0.00015268463279160667,
|
728 |
-
"loss": 1.1954,
|
729 |
-
"step": 2400
|
730 |
-
},
|
731 |
-
{
|
732 |
-
"epoch": 0.25,
|
733 |
-
"learning_rate": 0.00015227319481588152,
|
734 |
-
"loss": 1.1926,
|
735 |
-
"step": 2420
|
736 |
-
},
|
737 |
-
{
|
738 |
-
"epoch": 0.25,
|
739 |
-
"learning_rate": 0.00015186175684015634,
|
740 |
-
"loss": 1.1893,
|
741 |
-
"step": 2440
|
742 |
-
},
|
743 |
-
{
|
744 |
-
"epoch": 0.25,
|
745 |
-
"learning_rate": 0.0001514503188644312,
|
746 |
-
"loss": 1.2152,
|
747 |
-
"step": 2460
|
748 |
-
},
|
749 |
-
{
|
750 |
-
"epoch": 0.25,
|
751 |
-
"learning_rate": 0.00015103888088870604,
|
752 |
-
"loss": 1.199,
|
753 |
-
"step": 2480
|
754 |
-
},
|
755 |
-
{
|
756 |
-
"epoch": 0.25,
|
757 |
-
"learning_rate": 0.0001506274429129809,
|
758 |
-
"loss": 1.1966,
|
759 |
-
"step": 2500
|
760 |
-
},
|
761 |
-
{
|
762 |
-
"epoch": 0.26,
|
763 |
-
"learning_rate": 0.0001502160049372557,
|
764 |
-
"loss": 1.2042,
|
765 |
-
"step": 2520
|
766 |
-
},
|
767 |
-
{
|
768 |
-
"epoch": 0.26,
|
769 |
-
"learning_rate": 0.00014980456696153056,
|
770 |
-
"loss": 1.1901,
|
771 |
-
"step": 2540
|
772 |
-
},
|
773 |
-
{
|
774 |
-
"epoch": 0.26,
|
775 |
-
"learning_rate": 0.0001493931289858054,
|
776 |
-
"loss": 1.2023,
|
777 |
-
"step": 2560
|
778 |
-
},
|
779 |
-
{
|
780 |
-
"epoch": 0.26,
|
781 |
-
"learning_rate": 0.00014898169101008023,
|
782 |
-
"loss": 1.1597,
|
783 |
-
"step": 2580
|
784 |
-
},
|
785 |
-
{
|
786 |
-
"epoch": 0.26,
|
787 |
-
"learning_rate": 0.00014857025303435507,
|
788 |
-
"loss": 1.1828,
|
789 |
-
"step": 2600
|
790 |
-
},
|
791 |
-
{
|
792 |
-
"epoch": 0.27,
|
793 |
-
"learning_rate": 0.00014815881505862992,
|
794 |
-
"loss": 1.1907,
|
795 |
-
"step": 2620
|
796 |
-
},
|
797 |
-
{
|
798 |
-
"epoch": 0.27,
|
799 |
-
"learning_rate": 0.00014774737708290477,
|
800 |
-
"loss": 1.1477,
|
801 |
-
"step": 2640
|
802 |
-
},
|
803 |
-
{
|
804 |
-
"epoch": 0.27,
|
805 |
-
"learning_rate": 0.0001473359391071796,
|
806 |
-
"loss": 1.2146,
|
807 |
-
"step": 2660
|
808 |
-
},
|
809 |
-
{
|
810 |
-
"epoch": 0.27,
|
811 |
-
"learning_rate": 0.00014692450113145444,
|
812 |
-
"loss": 1.2327,
|
813 |
-
"step": 2680
|
814 |
-
},
|
815 |
-
{
|
816 |
-
"epoch": 0.27,
|
817 |
-
"learning_rate": 0.0001465130631557293,
|
818 |
-
"loss": 1.1747,
|
819 |
-
"step": 2700
|
820 |
-
},
|
821 |
-
{
|
822 |
-
"epoch": 0.28,
|
823 |
-
"learning_rate": 0.0001461016251800041,
|
824 |
-
"loss": 1.1745,
|
825 |
-
"step": 2720
|
826 |
-
},
|
827 |
-
{
|
828 |
-
"epoch": 0.28,
|
829 |
-
"learning_rate": 0.00014569018720427896,
|
830 |
-
"loss": 1.1914,
|
831 |
-
"step": 2740
|
832 |
-
},
|
833 |
-
{
|
834 |
-
"epoch": 0.28,
|
835 |
-
"learning_rate": 0.0001452787492285538,
|
836 |
-
"loss": 1.1781,
|
837 |
-
"step": 2760
|
838 |
-
},
|
839 |
-
{
|
840 |
-
"epoch": 0.28,
|
841 |
-
"learning_rate": 0.00014486731125282865,
|
842 |
-
"loss": 1.1819,
|
843 |
-
"step": 2780
|
844 |
-
},
|
845 |
-
{
|
846 |
-
"epoch": 0.29,
|
847 |
-
"learning_rate": 0.00014445587327710347,
|
848 |
-
"loss": 1.1894,
|
849 |
-
"step": 2800
|
850 |
-
},
|
851 |
-
{
|
852 |
-
"epoch": 0.29,
|
853 |
-
"learning_rate": 0.00014404443530137832,
|
854 |
-
"loss": 1.2198,
|
855 |
-
"step": 2820
|
856 |
-
},
|
857 |
-
{
|
858 |
-
"epoch": 0.29,
|
859 |
-
"learning_rate": 0.00014363299732565317,
|
860 |
-
"loss": 1.1464,
|
861 |
-
"step": 2840
|
862 |
-
},
|
863 |
-
{
|
864 |
-
"epoch": 0.29,
|
865 |
-
"learning_rate": 0.000143221559349928,
|
866 |
-
"loss": 1.2039,
|
867 |
-
"step": 2860
|
868 |
-
},
|
869 |
-
{
|
870 |
-
"epoch": 0.29,
|
871 |
-
"learning_rate": 0.00014281012137420284,
|
872 |
-
"loss": 1.1758,
|
873 |
-
"step": 2880
|
874 |
-
},
|
875 |
-
{
|
876 |
-
"epoch": 0.3,
|
877 |
-
"learning_rate": 0.0001423986833984777,
|
878 |
-
"loss": 1.1958,
|
879 |
-
"step": 2900
|
880 |
-
},
|
881 |
-
{
|
882 |
-
"epoch": 0.3,
|
883 |
-
"learning_rate": 0.00014198724542275254,
|
884 |
-
"loss": 1.2163,
|
885 |
-
"step": 2920
|
886 |
-
},
|
887 |
-
{
|
888 |
-
"epoch": 0.3,
|
889 |
-
"learning_rate": 0.00014157580744702736,
|
890 |
-
"loss": 1.1724,
|
891 |
-
"step": 2940
|
892 |
-
},
|
893 |
-
{
|
894 |
-
"epoch": 0.3,
|
895 |
-
"learning_rate": 0.0001411643694713022,
|
896 |
-
"loss": 1.1339,
|
897 |
-
"step": 2960
|
898 |
-
},
|
899 |
-
{
|
900 |
-
"epoch": 0.3,
|
901 |
-
"learning_rate": 0.00014075293149557705,
|
902 |
-
"loss": 1.1752,
|
903 |
-
"step": 2980
|
904 |
-
},
|
905 |
-
{
|
906 |
-
"epoch": 0.31,
|
907 |
-
"learning_rate": 0.00014034149351985187,
|
908 |
-
"loss": 1.2221,
|
909 |
-
"step": 3000
|
910 |
-
}
|
911 |
-
],
|
912 |
-
"logging_steps": 20,
|
913 |
-
"max_steps": 9822,
|
914 |
-
"num_train_epochs": 1,
|
915 |
-
"save_steps": 500,
|
916 |
-
"total_flos": 4.354600931033088e+16,
|
917 |
-
"trial_name": null,
|
918 |
-
"trial_params": null
|
919 |
-
}
|
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3000/training_args.bin
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
size 4027
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/README.md
DELETED
@@ -1,21 +0,0 @@
---
library_name: peft
---
## Training procedure


The following `bitsandbytes` quantization config was used during training:
- quant_method: QuantizationMethod.BITS_AND_BYTES
- load_in_8bit: False
- load_in_4bit: True
- llm_int8_threshold: 6.0
- llm_int8_skip_modules: None
- llm_int8_enable_fp32_cpu_offload: False
- llm_int8_has_fp16_weight: False
- bnb_4bit_quant_type: nf4
- bnb_4bit_use_double_quant: True
- bnb_4bit_compute_dtype: bfloat16
### Framework versions


- PEFT 0.4.0.dev0
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
{
  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "revision": null,
  "target_modules": [
    "q_proj",
    "k_proj",
    "v_proj"
  ],
  "task_type": "CAUSAL_LM"
}
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/adapter_model.bin
DELETED
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:521f84dabed28cf35859ef05146c0171c22391e57dea16b1e4d0556eb54e4e31
size 39407821
|
|
|
|
|
|
|
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5422fc43dd6fd45aa435cf2033829bfbac9d5137da712d2b8184c9879cc062f3
-size 78844165
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f7b184e8b069b3c1f57297aa4ec5d496c4e561754a274e25a0d6f7951852d0c7
-size 14575
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:902a812f67d1fbc515b8e9637f7a043a8e94503bc816c37f0218207b22ea285a
-size 627
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/trainer_state.json
DELETED
@@ -1,1069 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.35633836873385344,
-  "eval_steps": 500,
-  "global_step": 3500,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {"epoch": 0.0, "learning_rate": 4e-05, "loss": 3.5766, "step": 20},
-    {"epoch": 0.0, "learning_rate": 8e-05, "loss": 2.9038, "step": 40},
-    {"epoch": 0.01, "learning_rate": 0.00012, "loss": 1.9072, "step": 60},
-    {"epoch": 0.01, "learning_rate": 0.00016, "loss": 1.7359, "step": 80},
-    {"epoch": 0.01, "learning_rate": 0.0002, "loss": 1.5349, "step": 100},
-    {"epoch": 0.01, "learning_rate": 0.00019958856202427486, "loss": 1.4908, "step": 120},
-    {"epoch": 0.01, "learning_rate": 0.00019917712404854968, "loss": 1.4931, "step": 140},
-    {"epoch": 0.02, "learning_rate": 0.00019876568607282453, "loss": 1.4201, "step": 160},
-    {"epoch": 0.02, "learning_rate": 0.00019835424809709937, "loss": 1.4541, "step": 180},
-    {"epoch": 0.02, "learning_rate": 0.00019794281012137422, "loss": 1.4102, "step": 200},
-    {"epoch": 0.02, "learning_rate": 0.00019753137214564904, "loss": 1.3861, "step": 220},
-    {"epoch": 0.02, "learning_rate": 0.0001971199341699239, "loss": 1.3939, "step": 240},
-    {"epoch": 0.03, "learning_rate": 0.00019670849619419874, "loss": 1.3883, "step": 260},
-    {"epoch": 0.03, "learning_rate": 0.00019629705821847356, "loss": 1.3257, "step": 280},
-    {"epoch": 0.03, "learning_rate": 0.0001958856202427484, "loss": 1.386, "step": 300},
-    {"epoch": 0.03, "learning_rate": 0.00019547418226702326, "loss": 1.3746, "step": 320},
-    {"epoch": 0.03, "learning_rate": 0.0001950627442912981, "loss": 1.3266, "step": 340},
-    {"epoch": 0.04, "learning_rate": 0.00019465130631557293, "loss": 1.3591, "step": 360},
-    {"epoch": 0.04, "learning_rate": 0.00019423986833984777, "loss": 1.3464, "step": 380},
-    {"epoch": 0.04, "learning_rate": 0.00019382843036412262, "loss": 1.3573, "step": 400},
-    {"epoch": 0.04, "learning_rate": 0.00019341699238839744, "loss": 1.3455, "step": 420},
-    {"epoch": 0.04, "learning_rate": 0.0001930055544126723, "loss": 1.3223, "step": 440},
-    {"epoch": 0.05, "learning_rate": 0.00019259411643694714, "loss": 1.3293, "step": 460},
-    {"epoch": 0.05, "learning_rate": 0.000192182678461222, "loss": 1.3033, "step": 480},
-    {"epoch": 0.05, "learning_rate": 0.0001917712404854968, "loss": 1.3323, "step": 500},
-    {"epoch": 0.05, "learning_rate": 0.00019135980250977166, "loss": 1.2838, "step": 520},
-    {"epoch": 0.05, "learning_rate": 0.0001909483645340465, "loss": 1.332, "step": 540},
-    {"epoch": 0.06, "learning_rate": 0.00019053692655832133, "loss": 1.3118, "step": 560},
-    {"epoch": 0.06, "learning_rate": 0.00019012548858259617, "loss": 1.3237, "step": 580},
-    {"epoch": 0.06, "learning_rate": 0.00018971405060687102, "loss": 1.2541, "step": 600},
-    {"epoch": 0.06, "learning_rate": 0.00018930261263114587, "loss": 1.28, "step": 620},
-    {"epoch": 0.07, "learning_rate": 0.0001888911746554207, "loss": 1.231, "step": 640},
-    {"epoch": 0.07, "learning_rate": 0.00018847973667969554, "loss": 1.2676, "step": 660},
-    {"epoch": 0.07, "learning_rate": 0.0001880682987039704, "loss": 1.2909, "step": 680},
-    {"epoch": 0.07, "learning_rate": 0.0001876568607282452, "loss": 1.2499, "step": 700},
-    {"epoch": 0.07, "learning_rate": 0.00018724542275252006, "loss": 1.2679, "step": 720},
-    {"epoch": 0.08, "learning_rate": 0.0001868339847767949, "loss": 1.2674, "step": 740},
-    {"epoch": 0.08, "learning_rate": 0.00018642254680106975, "loss": 1.2736, "step": 760},
-    {"epoch": 0.08, "learning_rate": 0.00018601110882534457, "loss": 1.2843, "step": 780},
-    {"epoch": 0.08, "learning_rate": 0.00018559967084961942, "loss": 1.281, "step": 800},
-    {"epoch": 0.08, "learning_rate": 0.00018518823287389427, "loss": 1.3699, "step": 820},
-    {"epoch": 0.09, "learning_rate": 0.0001847767948981691, "loss": 1.2705, "step": 840},
-    {"epoch": 0.09, "learning_rate": 0.00018436535692244394, "loss": 1.2279, "step": 860},
-    {"epoch": 0.09, "learning_rate": 0.0001839539189467188, "loss": 1.2779, "step": 880},
-    {"epoch": 0.09, "learning_rate": 0.00018354248097099364, "loss": 1.2086, "step": 900},
-    {"epoch": 0.09, "learning_rate": 0.00018313104299526846, "loss": 1.2999, "step": 920},
-    {"epoch": 0.1, "learning_rate": 0.0001827196050195433, "loss": 1.2503, "step": 940},
-    {"epoch": 0.1, "learning_rate": 0.00018230816704381815, "loss": 1.2466, "step": 960},
-    {"epoch": 0.1, "learning_rate": 0.00018189672906809297, "loss": 1.2113, "step": 980},
-    {"epoch": 0.1, "learning_rate": 0.00018148529109236782, "loss": 1.2356, "step": 1000},
-    {"epoch": 0.1, "learning_rate": 0.00018107385311664267, "loss": 1.2631, "step": 1020},
-    {"epoch": 0.11, "learning_rate": 0.00018066241514091752, "loss": 1.2443, "step": 1040},
-    {"epoch": 0.11, "learning_rate": 0.00018025097716519234, "loss": 1.2406, "step": 1060},
-    {"epoch": 0.11, "learning_rate": 0.0001798395391894672, "loss": 1.2308, "step": 1080},
-    {"epoch": 0.11, "learning_rate": 0.00017942810121374204, "loss": 1.2649, "step": 1100},
-    {"epoch": 0.11, "learning_rate": 0.00017901666323801686, "loss": 1.2263, "step": 1120},
-    {"epoch": 0.12, "learning_rate": 0.0001786052252622917, "loss": 1.2869, "step": 1140},
-    {"epoch": 0.12, "learning_rate": 0.00017819378728656655, "loss": 1.2255, "step": 1160},
-    {"epoch": 0.12, "learning_rate": 0.0001777823493108414, "loss": 1.2596, "step": 1180},
-    {"epoch": 0.12, "learning_rate": 0.00017737091133511622, "loss": 1.2748, "step": 1200},
-    {"epoch": 0.12, "learning_rate": 0.00017695947335939107, "loss": 1.2587, "step": 1220},
-    {"epoch": 0.13, "learning_rate": 0.00017654803538366592, "loss": 1.2651, "step": 1240},
-    {"epoch": 0.13, "learning_rate": 0.00017613659740794074, "loss": 1.2659, "step": 1260},
-    {"epoch": 0.13, "learning_rate": 0.0001757251594322156, "loss": 1.2077, "step": 1280},
-    {"epoch": 0.13, "learning_rate": 0.00017531372145649044, "loss": 1.25, "step": 1300},
-    {"epoch": 0.13, "learning_rate": 0.00017490228348076528, "loss": 1.2149, "step": 1320},
-    {"epoch": 0.14, "learning_rate": 0.0001744908455050401, "loss": 1.2417, "step": 1340},
-    {"epoch": 0.14, "learning_rate": 0.00017407940752931498, "loss": 1.1939, "step": 1360},
-    {"epoch": 0.14, "learning_rate": 0.00017366796955358983, "loss": 1.2688, "step": 1380},
-    {"epoch": 0.14, "learning_rate": 0.00017325653157786465, "loss": 1.2287, "step": 1400},
-    {"epoch": 0.14, "learning_rate": 0.0001728450936021395, "loss": 1.2931, "step": 1420},
-    {"epoch": 0.15, "learning_rate": 0.00017243365562641435, "loss": 1.2695, "step": 1440},
-    {"epoch": 0.15, "learning_rate": 0.00017202221765068917, "loss": 1.2228, "step": 1460},
-    {"epoch": 0.15, "learning_rate": 0.00017161077967496401, "loss": 1.2419, "step": 1480},
-    {"epoch": 0.15, "learning_rate": 0.00017119934169923886, "loss": 1.2483, "step": 1500},
-    {"epoch": 0.15, "learning_rate": 0.0001707879037235137, "loss": 1.2144, "step": 1520},
-    {"epoch": 0.16, "learning_rate": 0.00017037646574778853, "loss": 1.2148, "step": 1540},
-    {"epoch": 0.16, "learning_rate": 0.00016996502777206338, "loss": 1.2196, "step": 1560},
-    {"epoch": 0.16, "learning_rate": 0.00016955358979633823, "loss": 1.2581, "step": 1580},
-    {"epoch": 0.16, "learning_rate": 0.00016914215182061305, "loss": 1.2414, "step": 1600},
-    {"epoch": 0.16, "learning_rate": 0.0001687307138448879, "loss": 1.2357, "step": 1620},
-    {"epoch": 0.17, "learning_rate": 0.00016831927586916274, "loss": 1.22, "step": 1640},
-    {"epoch": 0.17, "learning_rate": 0.0001679078378934376, "loss": 1.2234, "step": 1660},
-    {"epoch": 0.17, "learning_rate": 0.00016749639991771241, "loss": 1.2291, "step": 1680},
-    {"epoch": 0.17, "learning_rate": 0.00016708496194198726, "loss": 1.2344, "step": 1700},
-    {"epoch": 0.18, "learning_rate": 0.0001666735239662621, "loss": 1.1987, "step": 1720},
-    {"epoch": 0.18, "learning_rate": 0.00016626208599053693, "loss": 1.2232, "step": 1740},
-    {"epoch": 0.18, "learning_rate": 0.00016585064801481178, "loss": 1.2402, "step": 1760},
-    {"epoch": 0.18, "learning_rate": 0.00016543921003908663, "loss": 1.1475, "step": 1780},
-    {"epoch": 0.18, "learning_rate": 0.00016502777206336148, "loss": 1.272, "step": 1800},
-    {"epoch": 0.19, "learning_rate": 0.0001646163340876363, "loss": 1.2369, "step": 1820},
-    {"epoch": 0.19, "learning_rate": 0.00016420489611191114, "loss": 1.212, "step": 1840},
-    {"epoch": 0.19, "learning_rate": 0.000163793458136186, "loss": 1.2191, "step": 1860},
-    {"epoch": 0.19, "learning_rate": 0.00016338202016046081, "loss": 1.1953, "step": 1880},
-    {"epoch": 0.19, "learning_rate": 0.00016297058218473566, "loss": 1.2126, "step": 1900},
-    {"epoch": 0.2, "learning_rate": 0.0001625591442090105, "loss": 1.2396, "step": 1920},
-    {"epoch": 0.2, "learning_rate": 0.00016214770623328536, "loss": 1.1468, "step": 1940},
-    {"epoch": 0.2, "learning_rate": 0.00016173626825756018, "loss": 1.1847, "step": 1960},
-    {"epoch": 0.2, "learning_rate": 0.00016132483028183503, "loss": 1.2214, "step": 1980},
-    {"epoch": 0.2, "learning_rate": 0.00016091339230610988, "loss": 1.2138, "step": 2000},
-    {"epoch": 0.21, "learning_rate": 0.0001605019543303847, "loss": 1.2158, "step": 2020},
-    {"epoch": 0.21, "learning_rate": 0.00016009051635465954, "loss": 1.2065, "step": 2040},
-    {"epoch": 0.21, "learning_rate": 0.0001596790783789344, "loss": 1.2048, "step": 2060},
-    {"epoch": 0.21, "learning_rate": 0.00015926764040320924, "loss": 1.1904, "step": 2080},
-    {"epoch": 0.21, "learning_rate": 0.00015885620242748406, "loss": 1.1872, "step": 2100},
-    {"epoch": 0.22, "learning_rate": 0.0001584447644517589, "loss": 1.2087, "step": 2120},
-    {"epoch": 0.22, "learning_rate": 0.00015803332647603376, "loss": 1.1909, "step": 2140},
-    {"epoch": 0.22, "learning_rate": 0.00015762188850030858, "loss": 1.2609, "step": 2160},
-    {"epoch": 0.22, "learning_rate": 0.00015721045052458343, "loss": 1.2083, "step": 2180},
-    {"epoch": 0.22, "learning_rate": 0.00015679901254885827, "loss": 1.2205, "step": 2200},
-    {"epoch": 0.23, "learning_rate": 0.00015638757457313312, "loss": 1.1966, "step": 2220},
-    {"epoch": 0.23, "learning_rate": 0.00015597613659740794, "loss": 1.235, "step": 2240},
-    {"epoch": 0.23, "learning_rate": 0.0001555646986216828, "loss": 1.2488, "step": 2260},
-    {"epoch": 0.23, "learning_rate": 0.00015515326064595764, "loss": 1.2229, "step": 2280},
-    {"epoch": 0.23, "learning_rate": 0.00015474182267023246, "loss": 1.1874, "step": 2300},
-    {"epoch": 0.24, "learning_rate": 0.0001543303846945073, "loss": 1.1724, "step": 2320},
-    {"epoch": 0.24, "learning_rate": 0.00015391894671878216, "loss": 1.1786, "step": 2340},
-    {"epoch": 0.24, "learning_rate": 0.000153507508743057, "loss": 1.1913, "step": 2360},
-    {"epoch": 0.24, "learning_rate": 0.00015309607076733183, "loss": 1.2138, "step": 2380},
-    {"epoch": 0.24, "learning_rate": 0.00015268463279160667, "loss": 1.1954, "step": 2400},
-    {"epoch": 0.25, "learning_rate": 0.00015227319481588152, "loss": 1.1926, "step": 2420},
-    {"epoch": 0.25, "learning_rate": 0.00015186175684015634, "loss": 1.1893, "step": 2440},
-    {"epoch": 0.25, "learning_rate": 0.0001514503188644312, "loss": 1.2152, "step": 2460},
-    {"epoch": 0.25, "learning_rate": 0.00015103888088870604, "loss": 1.199, "step": 2480},
-    {"epoch": 0.25, "learning_rate": 0.0001506274429129809, "loss": 1.1966, "step": 2500},
-    {"epoch": 0.26, "learning_rate": 0.0001502160049372557, "loss": 1.2042, "step": 2520},
-    {"epoch": 0.26, "learning_rate": 0.00014980456696153056, "loss": 1.1901, "step": 2540},
-    {"epoch": 0.26, "learning_rate": 0.0001493931289858054, "loss": 1.2023, "step": 2560},
-    {"epoch": 0.26, "learning_rate": 0.00014898169101008023, "loss": 1.1597, "step": 2580},
-    {"epoch": 0.26, "learning_rate": 0.00014857025303435507, "loss": 1.1828, "step": 2600},
-    {"epoch": 0.27, "learning_rate": 0.00014815881505862992, "loss": 1.1907, "step": 2620},
-    {"epoch": 0.27, "learning_rate": 0.00014774737708290477, "loss": 1.1477, "step": 2640},
-    {"epoch": 0.27, "learning_rate": 0.0001473359391071796, "loss": 1.2146, "step": 2660},
-    {"epoch": 0.27, "learning_rate": 0.00014692450113145444, "loss": 1.2327, "step": 2680},
-    {"epoch": 0.27, "learning_rate": 0.0001465130631557293, "loss": 1.1747, "step": 2700},
-    {"epoch": 0.28, "learning_rate": 0.0001461016251800041, "loss": 1.1745, "step": 2720},
-    {"epoch": 0.28, "learning_rate": 0.00014569018720427896, "loss": 1.1914, "step": 2740},
-    {"epoch": 0.28, "learning_rate": 0.0001452787492285538, "loss": 1.1781, "step": 2760},
-    {"epoch": 0.28, "learning_rate": 0.00014486731125282865, "loss": 1.1819, "step": 2780},
-    {"epoch": 0.29, "learning_rate": 0.00014445587327710347, "loss": 1.1894, "step": 2800},
-    {"epoch": 0.29, "learning_rate": 0.00014404443530137832, "loss": 1.2198, "step": 2820},
-    {"epoch": 0.29, "learning_rate": 0.00014363299732565317, "loss": 1.1464, "step": 2840},
-    {"epoch": 0.29, "learning_rate": 0.000143221559349928, "loss": 1.2039, "step": 2860},
-    {"epoch": 0.29, "learning_rate": 0.00014281012137420284, "loss": 1.1758, "step": 2880},
-    {"epoch": 0.3, "learning_rate": 0.0001423986833984777, "loss": 1.1958, "step": 2900},
-    {"epoch": 0.3, "learning_rate": 0.00014198724542275254, "loss": 1.2163, "step": 2920},
-    {"epoch": 0.3, "learning_rate": 0.00014157580744702736, "loss": 1.1724, "step": 2940},
-    {"epoch": 0.3, "learning_rate": 0.0001411643694713022, "loss": 1.1339, "step": 2960},
-    {"epoch": 0.3, "learning_rate": 0.00014075293149557705, "loss": 1.1752, "step": 2980},
-    {"epoch": 0.31, "learning_rate": 0.00014034149351985187, "loss": 1.2221, "step": 3000},
-    {"epoch": 0.31, "learning_rate": 0.00013993005554412672, "loss": 1.1954, "step": 3020},
-    {"epoch": 0.31, "learning_rate": 0.00013951861756840157, "loss": 1.1904, "step": 3040},
-    {"epoch": 0.31, "learning_rate": 0.00013910717959267642, "loss": 1.1601, "step": 3060},
-    {"epoch": 0.31, "learning_rate": 0.00013869574161695124, "loss": 1.148, "step": 3080},
-    {"epoch": 0.32, "learning_rate": 0.0001382843036412261, "loss": 1.2106, "step": 3100},
-    {"epoch": 0.32, "learning_rate": 0.00013787286566550094, "loss": 1.1367, "step": 3120},
-    {"epoch": 0.32, "learning_rate": 0.00013746142768977576, "loss": 1.1951, "step": 3140},
-    {"epoch": 0.32, "learning_rate": 0.0001370499897140506, "loss": 1.1706, "step": 3160},
-    {"epoch": 0.32, "learning_rate": 0.00013663855173832545, "loss": 1.1872, "step": 3180},
-    {"epoch": 0.33, "learning_rate": 0.0001362271137626003, "loss": 1.1498, "step": 3200},
-    {"epoch": 0.33, "learning_rate": 0.00013581567578687512, "loss": 1.2111, "step": 3220},
-    {"epoch": 0.33, "learning_rate": 0.00013540423781114997, "loss": 1.1774, "step": 3240},
-    {"epoch": 0.33, "learning_rate": 0.00013499279983542482, "loss": 1.1927, "step": 3260},
-    {"epoch": 0.33, "learning_rate": 0.00013458136185969964, "loss": 1.1903, "step": 3280},
-    {"epoch": 0.34, "learning_rate": 0.0001341699238839745, "loss": 1.1976, "step": 3300},
-    {"epoch": 0.34, "learning_rate": 0.00013375848590824934, "loss": 1.1687, "step": 3320},
-    {"epoch": 0.34, "learning_rate": 0.00013334704793252418, "loss": 1.1598, "step": 3340},
-    {"epoch": 0.34, "learning_rate": 0.000132935609956799, "loss": 1.1414, "step": 3360},
-    {"epoch": 0.34, "learning_rate": 0.00013252417198107385, "loss": 1.169, "step": 3380},
-    {"epoch": 0.35, "learning_rate": 0.0001321127340053487, "loss": 1.1676, "step": 3400},
-    {"epoch": 0.35, "learning_rate": 0.00013170129602962352, "loss": 1.2179, "step": 3420},
-    {"epoch": 0.35, "learning_rate": 0.00013128985805389837, "loss": 1.2435, "step": 3440},
-    {"epoch": 0.35, "learning_rate": 0.00013087842007817322, "loss": 1.1457, "step": 3460},
-    {"epoch": 0.35, "learning_rate": 0.00013046698210244807, "loss": 1.1818, "step": 3480},
-    {"epoch": 0.36, "learning_rate": 0.0001300555441267229, "loss": 1.1666, "step": 3500}
-  ],
-  "logging_steps": 20,
-  "max_steps": 9822,
-  "num_train_epochs": 1,
-  "save_steps": 500,
-  "total_flos": 5.077651028865024e+16,
-  "trial_name": null,
-  "trial_params": null
-}
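The log above is consistent with a linear learning-rate schedule: the rate warms up to 2e-4 by step 100 and then decays linearly toward zero at max_steps 9822 (for example, at step 3500, 2e-4 * (9822 - 3500) / (9822 - 100) ≈ 1.30056e-4, matching the last entry). A minimal sketch of checking that inference follows; the 100-step warmup is inferred from the log, not stated in the file, and the local filename is hypothetical.

# Minimal sketch (not from the repo): load a trainer_state.json like the one
# above and verify the implied linear LR decay after warmup.
import json

with open("trainer_state.json") as f:   # hypothetical local copy of the file
    state = json.load(f)

max_steps, warmup, peak_lr = state["max_steps"], 100, 2e-4
for entry in state["log_history"]:
    step, lr = entry["step"], entry["learning_rate"]
    if step > warmup:
        expected = peak_lr * (max_steps - step) / (max_steps - warmup)
        assert abs(lr - expected) < 1e-10, (step, lr, expected)
print(f"{len(state['log_history'])} entries; linear decay holds after warmup")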
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-3500/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8819fc087370c0c0dd1869922822cf7a5ebe84fa7a7194c69a0ec917ff22569b
-size 4027
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-4000/README.md
DELETED
@@ -1,21 +0,0 @@
----
-library_name: peft
----
-## Training procedure
-
-
-The following `bitsandbytes` quantization config was used during training:
-- quant_method: QuantizationMethod.BITS_AND_BYTES
-- load_in_8bit: False
-- load_in_4bit: True
-- llm_int8_threshold: 6.0
-- llm_int8_skip_modules: None
-- llm_int8_enable_fp32_cpu_offload: False
-- llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
-### Framework versions
-
-
-- PEFT 0.4.0.dev0
https:/huggingface.co/AbineshMoonpai/CodeLlama-SQL-13b/tree/main/trainer_outputs/checkpoint-4000/adapter_config.json
DELETED
@@ -1,21 +0,0 @@
-{
-  "base_model_name_or_path": "codellama/CodeLlama-13b-Instruct-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "lora_alpha": 32,
-  "lora_dropout": 0.05,
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 8,
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "k_proj",
-    "v_proj"
-  ],
-  "task_type": "CAUSAL_LM"
-}