nRuaif committed
Commit 10055d4 · 1 Parent(s): aa291af

Upload folder using huggingface_hub

Files changed (1)
  1. adapter_config.json +7 -7
adapter_config.json CHANGED
@@ -3,23 +3,23 @@
   "base_model_name_or_path": "NousResearch/Llama-2-13b-hf",
   "bias": "none",
   "fan_in_fan_out": null,
-  "inference_mode": true,
+  "inference_mode": false,
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
+  "lora_alpha": 8,
   "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 8,
+  "r": 4,
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "k_proj",
+    "up_proj",
     "v_proj",
-    "o_proj",
     "gate_proj",
-    "up_proj",
+    "k_proj",
+    "q_proj",
+    "o_proj",
     "down_proj"
   ],
   "task_type": "CAUSAL_LM"