cimol committed
Commit eceda6c · verified · 1 Parent(s): 6f2a692

Update README.md

Files changed (1)
  1. README.md +1 -116
README.md CHANGED
@@ -14,122 +14,7 @@ model-index:
  should probably proofread and complete it, then remove this comment. -->
 
  [<img src="https://raw.githubusercontent.com/axolotl-ai-cloud/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="200" height="32"/>](https://github.com/axolotl-ai-cloud/axolotl)
- <details><summary>See axolotl config</summary>
-
- axolotl version: `0.4.1`
- ```yaml
- adapter: lora
- base_model: NousResearch/Genstruct-7B
- bf16: true
- chat_template: llama3
- data_processes: 54
- dataset_prepared_path: null
- datasets:
- - data_files:
-   - c99bcfaf72b4ce96_train_data.json
-   ds_type: json
-   format: custom
-   path: /workspace/input_data/c99bcfaf72b4ce96_train_data.json
-   type:
-     field_input: dense_caption
-     field_instruction: source
-     field_output: short_caption
-     format: '{instruction} {input}'
-     no_input_format: '{instruction}'
-     system_format: '{system}'
-     system_prompt: ''
- debug: null
- deepspeed: null
- device_map: auto
- distributed_training:
-   multi_gpu: true
-   num_gpus: 2
- do_eval: true
- early_stopping_patience: 5
- eval_batch_size: 8
- eval_max_new_tokens: 128
- eval_steps: 150
- eval_table_size: null
- evals_per_epoch: null
- flash_attention: true
- fp16: false
- fsdp:
- - full_shard
- fsdp_config:
-   activation_checkpointing: false
-   backward_prefetch: BACKWARD_POST
-   forward_prefetch: FORWARD_POST
-   fsdp_min_num_params: 1000000000
-   limit_all_gathers: true
-   mixed_precision: bf16
-   sharding_strategy: FULL_SHARD
- gradient_accumulation_steps: 2
- gradient_checkpointing: true
- group_by_length: true
- hub_ignore_patterns:
- - README.md
- - config.json
- hub_model_id: cimol/bb8db7d9-f5f8-49e9-9bc8-c9398d45aae8
- hub_repo: null
- hub_strategy: end
- hub_token: null
- learning_rate: 0.00015
- load_in_4bit: false
- load_in_8bit: false
- local_rank: null
- logging_steps: 10
- lora_alpha: 128
- lora_dropout: 0.3
- lora_fan_in_fan_out: null
- lora_model_dir: null
- lora_r: 64
- lora_target_linear: true
- lr_scheduler: cosine
- lr_scheduler_warmup_steps: 100
- max_grad_norm: 0.5
- max_memory:
-   0: 80GB
-   1: 80GB
- max_steps: 300
- micro_batch_size: 8
- mlflow_experiment_name: /tmp/c99bcfaf72b4ce96_train_data.json
- model_type: AutoModelForCausalLM
- num_epochs: 3
- optim_args:
-   adam_beta1: 0.9
-   adam_beta2: 0.95
-   adam_epsilon: 1e-8
- optimizer: adamw_torch
- output_dir: miner_id_24
- pad_to_sequence_len: true
- resume_from_checkpoint: null
- s2_attention: null
- sample_packing: false
- save_steps: 150
- saves_per_epoch: null
- seed: 17333
- sequence_len: 1024
- strict: false
- tf32: true
- tokenizer_type: AutoTokenizer
- total_train_batch_size: 32
- train_batch_size: 16
- train_on_inputs: false
- trust_remote_code: true
- val_set_size: 0.05
- wandb_entity: null
- wandb_mode: online
- wandb_name: 5fe4e4a5-962b-4cf1-ad5b-1719042dd13b
- wandb_project: Gradients-On-Demand
- wandb_run: your_name
- wandb_runid: 5fe4e4a5-962b-4cf1-ad5b-1719042dd13b
- warmup_steps: 100
- weight_decay: 0.005
- xformers_attention: null
-
- ```
-
- </details><br>
+ <br>
 
  # bb8db7d9-f5f8-49e9-9bc8-c9398d45aae8
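
Aside (not part of the commit): the `type` block in the removed config defines how each JSON record is turned into a training prompt. A minimal sketch of that mapping, using the field names and templates taken from the YAML above; the sample record is made up for illustration:

```python
# Sketch of the prompt mapping implied by the removed config's dataset `type`
# block. Field names and templates come from the YAML above; the sample
# record is hypothetical.
def build_prompt(record: dict) -> str:
    instruction = record["source"]        # field_instruction: source
    dense = record.get("dense_caption")   # field_input: dense_caption
    if dense:                             # format: '{instruction} {input}'
        return f"{instruction} {dense}"
    return instruction                    # no_input_format: '{instruction}'

# The target the model is trained to emit is the record's `short_caption`
# field (field_output in the config).
example = {"source": "Summarize the scene in one sentence.",
           "dense_caption": "A red bicycle leans against a weathered brick wall."}
print(build_prompt(example))
```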
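
Likewise, since the config sets `hub_strategy: end` and pushes to the `hub_model_id` repo, the trained LoRA adapter could presumably be loaded for inference as sketched below; this assumes the adapter weights actually exist in that repo and is not an official usage example:

```python
# Minimal sketch, assuming the LoRA adapter was pushed to the hub_model_id
# repo named in the config. Base model and dtype are taken from the YAML
# (base_model, bf16: true).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "NousResearch/Genstruct-7B",   # base_model from the config
    torch_dtype=torch.bfloat16,    # bf16: true
    device_map="auto",             # device_map: auto
)
model = PeftModel.from_pretrained(base, "cimol/bb8db7d9-f5f8-49e9-9bc8-c9398d45aae8")
tokenizer = AutoTokenizer.from_pretrained("NousResearch/Genstruct-7B")
```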