# Lamarckvergence-14B / mergekit_config.yml
base_model: sometimesanotion/Lamarck-14B-v0.7
dtype: bfloat16
merge_method: slerp
parameters:
  # t is the SLERP interpolation factor: t=0 returns the base model
  # (Lamarck-14B-v0.7), t=1 returns the other model. A list of values
  # is spread as a gradient across the layer range.
  t:
  - filter: self_attn  # attention weights: favor Lamarck in early layers, Qwenvergence in late layers
    value: [0.0, 0.5, 0.3, 0.7, 1.0]
  - filter: mlp        # MLP weights: the mirrored curve
    value: [1.0, 0.5, 0.7, 0.3, 0.0]
  - value: 0.5         # all remaining parameters: an even blend
slices:
- sources:
  - layer_range: [0, 48]  # all 48 layers of each model
    model: sometimesanotion/Lamarck-14B-v0.7
  - layer_range: [0, 48]
    model: sometimesanotion/Qwenvergence-14B-v12-Prose-DS
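merge_method: slerp interpolates each pair of weight tensors along the great circle between them rather than along a straight line, which preserves the magnitude structure of the weights better than plain linear averaging. Below is a rough Python sketch of the core operation for intuition only; mergekit's actual implementation adds details such as per-layer gradient handling and fallbacks for degenerate angles.

import numpy as np

def slerp(t: float, v0: np.ndarray, v1: np.ndarray, eps: float = 1e-8) -> np.ndarray:
    """Spherical linear interpolation between two weight tensors (t=0 -> v0, t=1 -> v1)."""
    a, b = v0.ravel(), v1.ravel()
    # Normalize copies to measure the angle between the two weight vectors.
    a_n = a / (np.linalg.norm(a) + eps)
    b_n = b / (np.linalg.norm(b) + eps)
    dot = np.clip(np.dot(a_n, b_n), -1.0, 1.0)
    theta = np.arccos(dot)
    if theta < eps:
        # Nearly colinear vectors: fall back to ordinary linear interpolation.
        return (1 - t) * v0 + t * v1
    s = np.sin(theta)
    # Interpolate along the great circle, weighting the original vectors.
    out = (np.sin((1 - t) * theta) / s) * a + (np.sin(t * theta) / s) * b
    return out.reshape(v0.shape)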
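For reference, a config like this is typically executed with the mergekit-yaml CLI (mergekit-yaml mergekit_config.yml ./output-dir) or mergekit's Python API. A minimal sketch of the Python route follows, assuming the MergeConfiguration/run_merge entry points from mergekit's README; the output path is a placeholder.

import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse this YAML file into mergekit's validated configuration object.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the SLERP merge; "./Lamarckvergence-14B" is a placeholder output directory.
run_merge(
    merge_config,
    out_path="./Lamarckvergence-14B",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is available
        copy_tokenizer=True,             # carry the tokenizer into the output
    ),
)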