---
# NOTE(review): removed file-viewer chrome (a "File size" header, commit hash
# 0298ad3, and a line-number gutter) that was captured along with this YAML —
# it is not part of the configuration and would make the document fail to parse.
# mergekit configuration: combines seven Gemma-2-9B fine-tunes into a single
# model using the model_stock merge method, anchored on a base model.
models:
  # Every entry contributes the same layer span, [0, 42], so all models are
  # merged across the full network depth.
  # NOTE(review): whether the range end is inclusive or exclusive follows
  # mergekit's layer_range convention — confirm against the mergekit docs
  # and the base model's layer count before editing.
  - model: INSAIT-Institute/BgGPT-Gemma-2-9B-IT-v1.0
    layer_range: [0, 42]
  - model: lemon07r/Gemma-2-Ataraxy-9B
    layer_range: [0, 42]
  - model: Gunulhona/Gemma-Ko-Merge
    layer_range: [0, 42]
  - model: TheDrummer/Tiger-Gemma-9B-v3
    layer_range: [0, 42]
  - model: anthracite-org/magnum-v3-9b-customgemma2
    layer_range: [0, 42]
  - model: princeton-nlp/gemma-2-9b-it-SimPO
    layer_range: [0, 42]
  - model: Metin/Gemma-2-9b-it-TR-DPO-V1
    layer_range: [0, 42]
# Merge algorithm; model_stock uses the base model as an anchor when
# combining the listed checkpoints (see mergekit docs for exact semantics).
merge_method: model_stock
# Anchor model for model_stock; intentionally also present in the models
# list above.
base_model: Gunulhona/Gemma-Ko-Merge
# Precision for the merged output weights.
dtype: float16