Wonder-Griffin committed on
Commit
8179916
·
verified ·
1 Parent(s): b2f03c0

Delete training_params (2).json

Browse files
Files changed (1) hide show
  1. training_params (2).json +0 -49
training_params (2).json DELETED
@@ -1,49 +0,0 @@
1
- {
2
- "model": "Wonder-Griffin/TraXLMistral",
3
- "project_name": "TrainTraXLLMistral-1",
4
- "data_path": "TrainTraXLLMistral-1/autotrain-data",
5
- "train_split": "train",
6
- "valid_split": null,
7
- "add_eos_token": true,
8
- "block_size": 512,
9
- "model_max_length": 512,
10
- "padding": "right",
11
- "trainer": "default",
12
- "use_flash_attention_2": true,
13
- "log": "tensorboard",
14
- "disable_gradient_checkpointing": false,
15
- "logging_steps": -1,
16
- "eval_strategy": "steps",
17
- "save_total_limit": 1,
18
- "auto_find_batch_size": false,
19
- "mixed_precision": "fp16",
20
- "lr": 3.00003,
21
- "epochs": 3,
22
- "batch_size": 2,
23
- "warmup_ratio": 0.1,
24
- "gradient_accumulation": 4,
25
- "optimizer": "adamw_torch",
26
- "scheduler": "linear",
27
- "weight_decay": 0.0,
28
- "max_grad_norm": 1.0,
29
- "seed": 42,
30
- "chat_template": "none",
31
- "quantization": null,
32
- "target_modules": "all-linear",
33
- "merge_adapter": true,
34
- "peft": true,
35
- "lora_r": 16,
36
- "lora_alpha": 32,
37
- "lora_dropout": 0.05,
38
- "model_ref": null,
39
- "dpo_beta": 0.1,
40
- "max_prompt_length": 128,
41
- "max_completion_length": null,
42
- "prompt_text_column": "autotrain_prompt",
43
- "text_column": "autotrain_text",
44
- "rejected_text_column": "autotrain_rejected_text",
45
- "push_to_hub": true,
46
- "username": "Wonder-Griffin",
47
- "token": "hf_[REDACTED-ACCESS-TOKEN]",
48
- "unsloth": false
49
- }