thesunday committed on
Commit
6fa9985
·
1 Parent(s): aaddbb1

Update model card merge YAML

Browse files
Files changed (1) hide show
  1. README.md +15 -15
README.md CHANGED
@@ -31,61 +31,61 @@ The yaml config file for this model is here:
31
 
32
  ```yaml
33
  models:
34
- - model: /media/data5/hf_models/Mistral-7B-v0.1
35
  # no parameters necessary for base model
36
- - model: /media/data5/hf_models/dolphin-2.2.1-mistral-7b
37
  parameters:
38
  weight: 0.08
39
  density: 0.4
40
- - model: /media/data5/hf_models/SciPhi-Mistral-7B-32k
41
  parameters:
42
  weight: 0.08
43
  density: 0.4
44
- - model: /media/data5/hf_models/samantha-1.2-mistral-7b
45
  parameters:
46
  weight: 0.08
47
  density: 0.4
48
- - model: /media/data5/hf_models/docsgpt-7b-mistral
49
  parameters:
50
  weight: 0.08
51
  density: 0.4
52
- - model: /media/data5/hf_models/Starling-LM-7B-alpha
53
  parameters:
54
  weight: 0.08
55
  density: 0.4
56
- - model: /media/data5/hf_models/MetaMath-Cybertron-Starling
57
  parameters:
58
  weight: 0.08
59
  density: 0.4
60
- - model: /media/data5/hf_models/Mistral-7B-OpenOrca
61
  parameters:
62
  weight: 0.08
63
  density: 0.4
64
- - model: /media/data5/hf_models/v1olet_marcoroni-go-bruins-merge-7B
65
  parameters:
66
  weight: 0.08
67
  density: 0.4
68
- - model: /media/data5/hf_models/MistralHermes-CodePro-7B-v1
69
  parameters:
70
  weight: 0.08
71
  density: 0.4
72
- - model: /media/data5/hf_models/MAmmoTH-7B-Mistral
73
  parameters:
74
  weight: 0.08
75
  density: 0.4
76
- - model: /media/data5/hf_models/OpenHermes-2.5-Mistral-7B
77
  parameters:
78
  weight: 0.08
79
  density: 0.4
80
- - model: /media/data5/hf_models/OpenHermes-2.5-neural-chat-v3-3-Slerp
81
  parameters:
82
  weight: 0.08
83
  density: 0.4
84
- - model: /media/data5/hf_models/NeuralHermes-2.5-Mistral-7B
85
  parameters:
86
  weight: 0.08
87
  density: 0.4
88
- - model: /media/data5/hf_models/Mistral-7B-Instruct-v0.2
89
  parameters:
90
  weight: 0.08
91
  density: 0.5
 
31
 
32
  ```yaml
33
  models:
34
+ - model: mistralai/Mistral-7B-Instruct-v0.2
35
  # no parameters necessary for base model
36
+ - model: ehartford/dolphin-2.2.1-mistral-7b
37
  parameters:
38
  weight: 0.08
39
  density: 0.4
40
+ - model: SciPhi/SciPhi-Mistral-7B-32k
41
  parameters:
42
  weight: 0.08
43
  density: 0.4
44
+ - model: ehartford/samantha-1.2-mistral-7b
45
  parameters:
46
  weight: 0.08
47
  density: 0.4
48
+ - model: Arc53/docsgpt-7b-mistral
49
  parameters:
50
  weight: 0.08
51
  density: 0.4
52
+ - model: berkeley-nest/Starling-LM-7B-alpha
53
  parameters:
54
  weight: 0.08
55
  density: 0.4
56
+ - model: Q-bert/MetaMath-Cybertron-Starling
57
  parameters:
58
  weight: 0.08
59
  density: 0.4
60
+ - model: Open-Orca/Mistral-7B-OpenOrca
61
  parameters:
62
  weight: 0.08
63
  density: 0.4
64
+ - model: v1olet/v1olet_marcoroni-go-bruins-merge-7B
65
  parameters:
66
  weight: 0.08
67
  density: 0.4
68
+ - model: beowolx/MistralHermes-CodePro-7B-v1
69
  parameters:
70
  weight: 0.08
71
  density: 0.4
72
+ - model: TIGER-Lab/MAmmoTH-7B-Mistral
73
  parameters:
74
  weight: 0.08
75
  density: 0.4
76
+ - model: teknium/OpenHermes-2.5-Mistral-7B
77
  parameters:
78
  weight: 0.08
79
  density: 0.4
80
+ - model: Weyaxi/OpenHermes-2.5-neural-chat-v3-3-Slerp
81
  parameters:
82
  weight: 0.08
83
  density: 0.4
84
+ - model: mlabonne/NeuralHermes-2.5-Mistral-7B
85
  parameters:
86
  weight: 0.08
87
  density: 0.4
88
+ - model: mistralai/Mistral-7B-Instruct-v0.2
89
  parameters:
90
  weight: 0.08
91
  density: 0.5