Sandykgoyal committed on
Commit
dc1a6c3
·
verified ·
1 Parent(s): 3564081

Update adapter_config.json

Browse files
Files changed (1) hide show
  1. adapter_config.json +26 -6
adapter_config.json CHANGED
@@ -1,8 +1,28 @@
1
  {
2
- "base_model_name_or_path": "meta-llama/Llama-3.2-3B-Instruct",
3
- "peft_type": "LORA",
4
- "r": 8,
5
- "lora_alpha": 16,
6
- "lora_dropout": 0.0,
7
- "task_type": "CAUSAL_LM"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  }
 
1
  {
2
+ "batch_size": 1,
3
+ "config": null,
4
+ "data": "./data",
5
+ "fine_tune_type": "lora",
6
+ "grad_checkpoint": null,
7
+ "iters": 100,
8
+ "learning_rate": 1e-05,
9
+ "lora_parameters": {
10
+ "rank": 8,
11
+ "alpha": 16,
12
+ "dropout": 0.0,
13
+ "scale": 10.0
14
+ },
15
+ "lr_schedule": null,
16
+ "max_seq_length": 2048,
17
+ "model": "meta-llama/Llama-3.2-3B-Instruct",
18
+ "num_layers": 16,
19
+ "resume_adapter_file": null,
20
+ "save_every": 100,
21
+ "seed": 0,
22
+ "steps_per_eval": 200,
23
+ "steps_per_report": 10,
24
+ "test": false,
25
+ "test_batches": 500,
26
+ "train": true,
27
+ "val_batches": 25
28
  }