{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "local_pretrained_llm/Meta-Llama-3.1-8B-Instruct",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "wh",
    "output_projection",
    "down_proj",
    "up_proj",
    "node_embedding",
    "embed_gvp_input_features",
    "wg",
    "fc1",
    "fc2",
    "ws",
    "out_proj",
    "gate_proj",
    "wv",
    "mm_struc_projector",
    "v_proj",
    "k_proj",
    "q_proj",
    "o_proj",
    "embed_confidence",
    "embed_gvp_output"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": false
}
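
This is a standard PEFT `adapter_config.json`: a rank-64 LoRA adapter (alpha 16, dropout 0.05) over a Llama-3.1-8B-Instruct base, targeting both the usual attention/MLP projections and several model-specific structure-encoder modules, saved with `inference_mode` enabled. As a minimal sketch of how such an adapter is typically loaded with Hugging Face PEFT (the adapter directory path below is a hypothetical placeholder, not from the config):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Base model path, taken from base_model_name_or_path above.
base_path = "local_pretrained_llm/Meta-Llama-3.1-8B-Instruct"
# Hypothetical directory holding this adapter_config.json plus the
# adapter weights; adjust to wherever the adapter is actually stored.
adapter_dir = "path/to/adapter"

base_model = AutoModelForCausalLM.from_pretrained(base_path)
tokenizer = AutoTokenizer.from_pretrained(base_path)

# PeftModel reads adapter_config.json from adapter_dir and wraps each
# module listed in target_modules with rank-64 LoRA layers; because
# inference_mode is true, the adapter weights load frozen for inference.
model = PeftModel.from_pretrained(base_model, adapter_dir)
model.eval()
```

Note that this only applies cleanly if the base model actually contains the listed `target_modules`; the non-Llama names here (e.g. `node_embedding`, `embed_gvp_input_features`, `mm_struc_projector`) imply the adapter was trained on a composite model that extends Llama with additional structure-encoding components, so loading it onto a plain Llama checkpoint would fail to match those modules.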