Update README.md
README.md
@@ -172,12 +172,12 @@ lm_eval \
 
 | Metric | mistralai/Mixtral-8x7B-Instruct-v0.1 | neuralmagic/Mixtral-8x7B-Instruct-v0.1-FP8 |
 |-----------------------------------------|:---------------------------------:|:-------------------------------------------:|
-| ARC-Challenge (Acc-Norm, 25-shot) | 70.48 |
-| GSM8K (Strict-Match, 5-shot) | 65.50 |
-| HellaSwag (Acc-Norm, 10-shot) | 87.33 |
-| MMLU (Acc, 5-shot) | 70.30 |
-| TruthfulQA (MC2, 0-shot) | 64.81 |
-| Winogrande (Acc, 5-shot) | 82.24 |
-| **Average Score** | **73.44** |
-| **Recovery (%)** | **100.00** |
+| ARC-Challenge (Acc-Norm, 25-shot) | 70.48 | 69.54 |
+| GSM8K (Strict-Match, 5-shot) | 65.50 | 64.29 |
+| HellaSwag (Acc-Norm, 10-shot) | 87.33 | 86.96 |
+| MMLU (Acc, 5-shot) | 70.30 | 69.97 |
+| TruthfulQA (MC2, 0-shot) | 64.81 | 63.89 |
+| Winogrande (Acc, 5-shot) | 82.24 | 81.29 |
+| **Average Score** | **73.44** | **72.66** |
+| **Recovery (%)** | **100.00** | **98.94** |
 
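
For context, the new **Recovery (%)** row appears to be the FP8 model's average score expressed as a percentage of the baseline average. Below is a minimal sketch of that arithmetic, assuming the averages are taken over the six rounded per-task scores and then rounded to two decimals (the exact rounding convention is not stated in the README):

```python
# Sketch: reproduce the "Average Score" and "Recovery (%)" rows from the
# per-task scores in the table above. Rounding convention is an assumption.
baseline = {
    "ARC-Challenge": 70.48,
    "GSM8K": 65.50,
    "HellaSwag": 87.33,
    "MMLU": 70.30,
    "TruthfulQA": 64.81,
    "Winogrande": 82.24,
}
fp8 = {
    "ARC-Challenge": 69.54,
    "GSM8K": 64.29,
    "HellaSwag": 86.96,
    "MMLU": 69.97,
    "TruthfulQA": 63.89,
    "Winogrande": 81.29,
}

baseline_avg = round(sum(baseline.values()) / len(baseline), 2)  # 73.44
fp8_avg = round(sum(fp8.values()) / len(fp8), 2)                 # 72.66
recovery = round(fp8_avg / baseline_avg * 100, 2)                # 98.94

print(f"Average Score: {baseline_avg} vs {fp8_avg}, Recovery: {recovery}%")
```

With these values, 72.66 / 73.44 × 100 ≈ 98.94, matching the Recovery figure in the updated table.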