Add random LoRA adapter with seed 0
reward/adapter_config.json CHANGED
@@ -28,20 +28,20 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "model.layers.
+    "model.layers.0.self_attn.v_proj",
     "model.layers.0.mlp.up_proj",
-    "model.layers.0.self_attn.
-    "model.layers.1.self_attn.q_proj",
-    "model.layers.1.mlp.up_proj",
-    "model.layers.1.self_attn.o_proj",
+    "model.layers.0.self_attn.o_proj",
     "model.layers.0.mlp.down_proj",
+    "model.layers.1.self_attn.o_proj",
     "model.layers.1.mlp.gate_proj",
     "model.layers.0.mlp.gate_proj",
+    "model.layers.0.self_attn.k_proj",
     "model.layers.1.self_attn.v_proj",
     "model.layers.1.self_attn.k_proj",
     "model.layers.0.self_attn.q_proj",
-    "model.layers.
-    "model.layers.
+    "model.layers.1.mlp.down_proj",
+    "model.layers.1.self_attn.q_proj",
+    "model.layers.1.mlp.up_proj"
   ],
   "task_type": "SEQ_CLS",
   "trainable_token_indices": null,
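The post-change target_modules list covers all seven LoRA-targetable projections (the q/k/v/o attention projections and the up/down/gate MLP projections) in both layers 0 and 1. Below is a minimal sketch of how an adapter like this could be produced with PEFT. Only target_modules and task_type are taken from the config above; the base checkpoint name, num_labels, rank, alpha, and the use of init_lora_weights=False are assumptions not recorded in this diff.

    import torch
    from transformers import AutoModelForSequenceClassification
    from peft import LoraConfig, get_peft_model

    torch.manual_seed(0)  # "seed 0" from the commit message

    base = AutoModelForSequenceClassification.from_pretrained(
        "base-model-name",  # hypothetical placeholder; the diff does not name the base model
        num_labels=1,       # assumption: a scalar reward head
    )

    config = LoraConfig(
        task_type="SEQ_CLS",  # matches "task_type" in adapter_config.json
        r=8,                  # assumption: the rank lies outside the shown hunk
        lora_alpha=16,        # assumption
        target_modules=[      # the post-change list from the hunk above
            "model.layers.0.self_attn.q_proj",
            "model.layers.0.self_attn.k_proj",
            "model.layers.0.self_attn.v_proj",
            "model.layers.0.self_attn.o_proj",
            "model.layers.0.mlp.up_proj",
            "model.layers.0.mlp.down_proj",
            "model.layers.0.mlp.gate_proj",
            "model.layers.1.self_attn.q_proj",
            "model.layers.1.self_attn.k_proj",
            "model.layers.1.self_attn.v_proj",
            "model.layers.1.self_attn.o_proj",
            "model.layers.1.mlp.up_proj",
            "model.layers.1.mlp.down_proj",
            "model.layers.1.mlp.gate_proj",
        ],
        # Assumption: a "random" adapter skips PEFT's default zero-init of
        # lora_B, so the adapter perturbs the model before any training.
        init_lora_weights=False,
    )

    model = get_peft_model(base, config)
    # Writes adapter_config.json and adapter_model.safetensors.
    model.save_pretrained("reward")

Note that LoraConfig stores target_modules as a set, which would explain why the serialized order in adapter_config.json changes between commits even when the module set itself does not.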
reward/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:0a3d57d6dda256eabc40e534403283cf4d007229f95e132a91e51e80331aa7f7
+size 2267808
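The .safetensors entry is a Git LFS pointer rather than the weights themselves: oid is the SHA-256 digest of the file contents, and size is its length in bytes. A small sketch for checking a locally downloaded copy against the pointer values above:

    import hashlib
    from pathlib import Path

    data = Path("reward/adapter_model.safetensors").read_bytes()

    # Both expected values come from the LFS pointer in the diff above.
    assert len(data) == 2267808, "size mismatch with LFS pointer"
    digest = hashlib.sha256(data).hexdigest()
    assert digest == (
        "0a3d57d6dda256eabc40e534403283cf4d007229f95e132a91e51e80331aa7f7"
    ), "oid mismatch with LFS pointer"
    print("local file matches the LFS pointer")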