artena committed · verified
Commit 4f2e33f · Parent(s): 9e31297

Upload leaderboard_data.jsonl

Files changed (1):
  leaderboard_data.jsonl (+1 -1)
leaderboard_data.jsonl CHANGED
@@ -8,4 +8,4 @@
  {"Model":"google/gemma-2-9b", "Precision": "bfloat16", "#Params (B)": 9.24, "MMLU": 60.09, "GSM8K": 25.78, "ARC Easy": 68.31, "Winogrande": 65.15, "TruthfulQA": 39.69, "Hellaswag": 45.32, "Belebele": 86.78, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-9b"}
  {"Model":"meta-llama/Llama-3.1-70B", "Precision": "bfloat16", "#Params (B)": 70, "MMLU": 67.50, "GSM8K": 72.40, "ARC Easy": 70.92, "Winogrande": 64.01, "TruthfulQA": 43.59, "Hellaswag": 46.39, "Belebele": 90.02, "Hub License": "llama3.1", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.1-70B"}
  {"Model":"google/gemma-2-27b", "Precision": "bfloat16", "#Params (B)": 27.2, "MMLU": 64.82, "GSM8K": 68.69, "ARC Easy": 77.40, "Winogrande": 66.77, "TruthfulQA": 42.06, "Hellaswag": 50.82, "Belebele": 89.22, "Hub License": "gemma", "Model sha": "main", "model_name_for_query": "google/gemma-2-27b"}
- {"Model":"meta-llama/Llama-3.3-70B", "Precision": "bfloat16", "#Params (B)": 70, "MMLU": 71.46, "GSM8K": 80.97, "ARC Easy": 70.66, "Winogrande": 59.83, "TruthfulQA": 45.61, "Hellaswag": 46.05, "Belebele": 89.33, "Hub License": "llama3.3", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.3-70B"}
+ {"Model":"meta-llama/Llama-3.3-70B-Instruct", "Precision": "bfloat16", "#Params (B)": 70, "MMLU": 71.46, "GSM8K": 80.97, "ARC Easy": 70.66, "Winogrande": 59.83, "TruthfulQA": 45.61, "Hellaswag": 46.05, "Belebele": 89.33, "Hub License": "llama3.3", "Model sha": "main", "model_name_for_query": "meta-llama/Llama-3.3-70B-Instruct"}