duyphu committed · verified · commit 1b4b78a · 1 parent: c4bc471

Training in progress, step 1

adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
+    "down_proj",
     "k_proj",
     "gate_proj",
-    "down_proj",
-    "q_proj",
+    "up_proj",
     "v_proj",
-    "up_proj"
+    "o_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b59ae0c65fd66d333a6afa6a3e0eb355737767312007a55a6e7f7a51aaecf029
+oid sha256:ec6cbd1c2938d07aee1a5389dfc30d49c460718667389cc09d75a1fb980d99e6
 size 80013120
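The weights file is stored as a Git LFS pointer, so only the oid and size change here. A small sketch for checking a locally downloaded copy against the new pointer (the local filename is an assumption):

# Sketch: verify a downloaded LFS object against the pointer's oid/size.
import hashlib, os

path = "adapter_model.safetensors"   # assumed local filename
expected_oid = "ec6cbd1c2938d07aee1a5389dfc30d49c460718667389cc09d75a1fb980d99e6"
expected_size = 80013120

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size
assert digest.hexdigest() == expected_oid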
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:af0fc664b0f78b5edce403061af19e752e893c93796aa5a2da8ae2299e61a712
+oid sha256:f5e76f48b7fe2632a7d86e0199b2f33c9d5f9c8750a05025001349fc1c945098
 size 6776