base_model: meta-llama/Llama-3.1-8B-Instruct
dtype: bfloat16
merge_method: ties
parameters:
  int8_mask: 1.0
  normalize: 0.0
slices:
  - sources:
      - layer_range: [0, 32]
        model: BimatrixTemp/BIMATRIX-Llama-3.1-Lora
        parameters:
          density: 0.5
          weight: 0.5
      - layer_range: [0, 32]
        model: /workspace/mssong/finetune/mergekit/models/fcc-llama-8b
        parameters:
          density: 0.5
          weight: 0.5
      - layer_range: [0, 32]
        model: meta-llama/Llama-3.1-8B-Instruct
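
The configuration above is a standard mergekit YAML and can be applied with the mergekit-yaml CLI or from Python. Below is a minimal sketch using the Python API, assuming the configuration is saved as config.yml and the merged weights go to ./merged-model (both filenames are assumptions, not from the original); the import paths and MergeOptions fields follow the mergekit README and may differ between mergekit versions.

# A minimal sketch of running the TIES merge above via mergekit's Python API.
# Assumes the YAML configuration is saved as config.yml and that mergekit is
# installed; import paths and option names follow the mergekit README and may
# differ between versions.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "config.yml"      # the configuration shown above (assumed filename)
OUTPUT_PATH = "./merged-model"  # directory where the merged model is written (assumed)

# Parse and validate the YAML into a MergeConfiguration object.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge; uses CUDA when available, otherwise runs on CPU.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

The same merge can be launched without Python via the CLI, e.g. mergekit-yaml config.yml ./merged-model (again assuming those paths).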