base_model: cognitivecomputations/Dolphin3.0-Llama3.2-3B+bunnycore/Llama-3.2-3B-R1-lora
dtype: bfloat16
merge_method: passthrough
models:
  - model: cognitivecomputations/Dolphin3.0-Llama3.2-3B+bunnycore/Llama-3.2-3B-R1-lora
tokenizer_source: cognitivecomputations/Dolphin3.0-Llama3.2-3B
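
This is a mergekit merge configuration: it applies the bunnycore/Llama-3.2-3B-R1-lora adapter on top of cognitivecomputations/Dolphin3.0-Llama3.2-3B (mergekit's `model+lora` syntax) with the passthrough merge method, keeping the base model's tokenizer. To reproduce the merge locally, the usual route is the `mergekit-yaml` CLI (roughly `mergekit-yaml mergekit_config.yml ./output-dir`); below is a minimal Python sketch following mergekit's documented API. The config filename and output directory are assumptions, not part of this repo.

```python
# Minimal sketch of running the merge above via mergekit's Python API.
# Assumes `pip install mergekit` and that the YAML config shown above is
# saved locally as mergekit_config.yml (filename is an assumption).
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the merge configuration from the YAML file.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the passthrough merge; the output directory name is hypothetical.
run_merge(
    merge_config,
    out_path="./Dolphin3.0-Llama3.2-3B-R1-merged",
    options=MergeOptions(
        cuda=False,           # set True to merge on a GPU if one is available
        copy_tokenizer=True,  # copy the tokenizer named in tokenizer_source
    ),
)
```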