---
# Model-merge configuration (mergekit-style schema, by the look of the keys —
# confirm against the consuming tool). Merges four Llama-3-8B fine-tunes with
# the dare_ties method on top of a Pantheon-RP base.
merge_method: dare_ties
base_model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
tokenizer_source: base

# Source models; each carries its own per-model density/weight parameters.
models:
  - model: Sao10K/L3-8B-Stheno-v3.2
    parameters:
      density: 0.5
      weight: 1.0
  - model: Gryphe/Pantheon-RP-1.0-8b-Llama-3
    parameters:
      density: 0.6
      weight: 1.0
  - model: Gryphe/Tiamat-8b-1.2-Llama-3-DPO
    parameters:
      density: 0.4
      weight: 0.75
  - model: cgato/L3-TheSpice-8b-v0.8.3
    parameters:
      density: 0.5
      weight: 0.80

# Global merge parameters.
parameters:
  int8_mask: true

dtype: bfloat16