Update model card merge YAML
Browse files
README.md
CHANGED
@@ -31,61 +31,61 @@ The yaml config file for this model is here:
**Removed** (previous version, with placeholder model paths):

```yaml
models:
  - model: /
    # no parameters necessary for base model
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.4
  - model: /
    parameters:
      weight: 0.08
      density: 0.5
```
**Added** (updated version, with actual model repositories):

```yaml
models:
  - model: mistralai/Mistral-7B-Instruct-v0.2
    # no parameters necessary for base model
  - model: ehartford/dolphin-2.2.1-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: SciPhi/SciPhi-Mistral-7B-32k
    parameters:
      weight: 0.08
      density: 0.4
  - model: ehartford/samantha-1.2-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: Arc53/docsgpt-7b-mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: berkeley-nest/Starling-LM-7B-alpha
    parameters:
      weight: 0.08
      density: 0.4
  - model: Q-bert/MetaMath-Cybertron-Starling
    parameters:
      weight: 0.08
      density: 0.4
  - model: Open-Orca/Mistral-7B-OpenOrca
    parameters:
      weight: 0.08
      density: 0.4
  - model: v1olet/v1olet_marcoroni-go-bruins-merge-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: beowolx/MistralHermes-CodePro-7B-v1
    parameters:
      weight: 0.08
      density: 0.4
  - model: TIGER-Lab/MAmmoTH-7B-Mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: teknium/OpenHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: Weyaxi/OpenHermes-2.5-neural-chat-v3-3-Slerp
    parameters:
      weight: 0.08
      density: 0.4
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: mistralai/Mistral-7B-Instruct-v0.2
    parameters:
      weight: 0.08
      density: 0.5
```