open-r1-eval-leaderboard/eval_results/mistralai/Mixtral-8x22B-Instruct-v0.1/main/agieval/results_2024-04-17T15-53-13.386405.json
{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 2880835.852573184,
        "end_time": 2886207.986741786,
        "total_evaluation_time_secondes": "5372.1341686020605",
        "model_name": "mistralai/Mixtral-8x22B-Instruct-v0.1",
        "model_sha": "796bc4393fd5e7e0c0ff1c44de2526419f163003",
        "model_dtype": "torch.bfloat16",
        "model_size": "263.69 GB",
        "config": null
    },
    "results": {
        "lighteval|agieval:aqua-rat|0": {
            "acc": 0.2992125984251969,
            "acc_stderr": 0.028788750779051485,
            "acc_norm": 0.3031496062992126,
            "acc_norm_stderr": 0.028896019290369876
        },
        "lighteval|agieval:gaokao-biology|0": {
            "acc": 0.6095238095238096,
            "acc_stderr": 0.03374578018258081,
            "acc_norm": 0.6047619047619047,
            "acc_norm_stderr": 0.03381804275590871
        },
        "lighteval|agieval:gaokao-chemistry|0": {
            "acc": 0.3864734299516908,
            "acc_stderr": 0.03392679997822462,
            "acc_norm": 0.3961352657004831,
            "acc_norm_stderr": 0.0340767350076416
        },
        "lighteval|agieval:gaokao-chinese|0": {
            "acc": 0.47560975609756095,
            "acc_stderr": 0.03190579986955095,
            "acc_norm": 0.47560975609756095,
            "acc_norm_stderr": 0.03190579986955095
        },
        "lighteval|agieval:gaokao-english|0": {
            "acc": 0.8529411764705882,
            "acc_stderr": 0.020279402936174584,
            "acc_norm": 0.8725490196078431,
            "acc_norm_stderr": 0.01909486481386516
        },
        "lighteval|agieval:gaokao-geography|0": {
            "acc": 0.7035175879396985,
            "acc_stderr": 0.03245669931306982,
            "acc_norm": 0.7035175879396985,
            "acc_norm_stderr": 0.03245669931306982
        },
        "lighteval|agieval:gaokao-history|0": {
            "acc": 0.6468085106382979,
            "acc_stderr": 0.031245325202761923,
            "acc_norm": 0.6382978723404256,
            "acc_norm_stderr": 0.03141082197596241
        },
        "lighteval|agieval:gaokao-mathqa|0": {
            "acc": 0.3475783475783476,
            "acc_stderr": 0.025454028021011457,
            "acc_norm": 0.35327635327635326,
            "acc_norm_stderr": 0.025549513358078212
        },
        "lighteval|agieval:gaokao-physics|0": {
            "acc": 0.49,
            "acc_stderr": 0.035436970729343674,
            "acc_norm": 0.495,
            "acc_norm_stderr": 0.03544228800309697
        },
        "lighteval|agieval:logiqa-en|0": {
            "acc": 0.4792626728110599,
            "acc_stderr": 0.019594738825317357,
            "acc_norm": 0.47619047619047616,
            "acc_norm_stderr": 0.019589365504939123
        },
        "lighteval|agieval:logiqa-zh|0": {
            "acc": 0.5299539170506913,
            "acc_stderr": 0.01957638934467575,
            "acc_norm": 0.5268817204301075,
            "acc_norm_stderr": 0.01958324924350952
        },
        "lighteval|agieval:lsat-ar|0": {
            "acc": 0.28695652173913044,
            "acc_stderr": 0.029891541673635467,
            "acc_norm": 0.2608695652173913,
            "acc_norm_stderr": 0.029017133559381268
        },
        "lighteval|agieval:lsat-lr|0": {
            "acc": 0.692156862745098,
            "acc_stderr": 0.020460116941629386,
            "acc_norm": 0.6607843137254902,
            "acc_norm_stderr": 0.0209849981245291
        },
        "lighteval|agieval:lsat-rc|0": {
            "acc": 0.7323420074349443,
            "acc_stderr": 0.027044545314587293,
            "acc_norm": 0.7174721189591078,
            "acc_norm_stderr": 0.027502094728594356
        },
        "lighteval|agieval:sat-en|0": {
            "acc": 0.8446601941747572,
            "acc_stderr": 0.025299122760403032,
            "acc_norm": 0.8398058252427184,
            "acc_norm_stderr": 0.025617448429814785
        },
        "lighteval|agieval:sat-en-without-passage|0": {
            "acc": 0.5728155339805825,
            "acc_stderr": 0.03454921537431908,
            "acc_norm": 0.5679611650485437,
            "acc_norm_stderr": 0.0345974255383149
        },
        "lighteval|agieval:sat-math|0": {
            "acc": 0.5409090909090909,
            "acc_stderr": 0.03367359074425883,
            "acc_norm": 0.5045454545454545,
            "acc_norm_stderr": 0.0337854727395188
        },
        "lighteval|agieval:_average|0": {
            "acc": 0.5582777657335614,
            "acc_stderr": 0.028431106940623268,
            "acc_norm": 0.5527534120813397,
            "acc_norm_stderr": 0.028431057191537976
        },
        "all": {
            "acc": 0.5582777657335614,
            "acc_stderr": 0.028431106940623268,
            "acc_norm": 0.5527534120813397,
            "acc_norm_stderr": 0.028431057191537976
        }
    },
    "versions": {
        "lighteval|agieval:aqua-rat|0": 0,
        "lighteval|agieval:gaokao-biology|0": 0,
        "lighteval|agieval:gaokao-chemistry|0": 0,
        "lighteval|agieval:gaokao-chinese|0": 0,
        "lighteval|agieval:gaokao-english|0": 0,
        "lighteval|agieval:gaokao-geography|0": 0,
        "lighteval|agieval:gaokao-history|0": 0,
        "lighteval|agieval:gaokao-mathqa|0": 0,
        "lighteval|agieval:gaokao-physics|0": 0,
        "lighteval|agieval:logiqa-en|0": 0,
        "lighteval|agieval:logiqa-zh|0": 0,
        "lighteval|agieval:lsat-ar|0": 0,
        "lighteval|agieval:lsat-lr|0": 0,
        "lighteval|agieval:lsat-rc|0": 0,
        "lighteval|agieval:sat-en|0": 0,
        "lighteval|agieval:sat-en-without-passage|0": 0,
        "lighteval|agieval:sat-math|0": 0
    },
    "config_tasks": {
        "lighteval|agieval:aqua-rat": {
            "name": "agieval:aqua-rat",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-aqua-rat",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 254,
            "effective_num_docs": 254,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-biology": {
            "name": "agieval:gaokao-biology",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-biology",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 210,
            "effective_num_docs": 210,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-chemistry": {
            "name": "agieval:gaokao-chemistry",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 207,
            "effective_num_docs": 207,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-chinese": {
            "name": "agieval:gaokao-chinese",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-chinese",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 246,
            "effective_num_docs": 246,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-english": {
            "name": "agieval:gaokao-english",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-english",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 306,
            "effective_num_docs": 306,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-geography": {
            "name": "agieval:gaokao-geography",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-geography",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 199,
            "effective_num_docs": 199,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-history": {
            "name": "agieval:gaokao-history",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-history",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 235,
            "effective_num_docs": 235,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-mathqa": {
            "name": "agieval:gaokao-mathqa",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 351,
            "effective_num_docs": 351,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:gaokao-physics": {
            "name": "agieval:gaokao-physics",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-gaokao-physics",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 200,
            "effective_num_docs": 200,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:logiqa-en": {
            "name": "agieval:logiqa-en",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-logiqa-en",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 651,
            "effective_num_docs": 651,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:logiqa-zh": {
            "name": "agieval:logiqa-zh",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-logiqa-zh",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 651,
            "effective_num_docs": 651,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:lsat-ar": {
            "name": "agieval:lsat-ar",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-lsat-ar",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 230,
            "effective_num_docs": 230,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:lsat-lr": {
            "name": "agieval:lsat-lr",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-lsat-lr",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 510,
            "effective_num_docs": 510,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:lsat-rc": {
            "name": "agieval:lsat-rc",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-lsat-rc",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 269,
            "effective_num_docs": 269,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:sat-en": {
            "name": "agieval:sat-en",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-sat-en",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 206,
            "effective_num_docs": 206,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:sat-en-without-passage": {
            "name": "agieval:sat-en-without-passage",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 206,
            "effective_num_docs": 206,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "lighteval|agieval:sat-math": {
            "name": "agieval:sat-math",
            "prompt_function": "agieval",
            "hf_repo": "dmayhem93/agieval-sat-math",
            "hf_subset": "default",
            "metric": [
                "loglikelihood_acc",
                "loglikelihood_acc_norm_nospace"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": "random_sampling",
            "generation_size": 1,
            "stop_sequence": null,
            "output_regex": null,
            "frozen": false,
            "suite": [
                "lighteval"
            ],
            "original_num_docs": 220,
            "effective_num_docs": 220,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        }
    },
    "summary_tasks": {
        "lighteval|agieval:aqua-rat|0": {
            "hashes": {
                "hash_examples": "f09607f69e5b7525",
                "hash_full_prompts": "30aa6d1c15f43bdb",
                "hash_input_tokens": "f5f2ed0210b16d6e",
                "hash_cont_tokens": "5063594d7b3de3b0"
            },
            "truncated": 0,
            "non_truncated": 254,
            "padded": 1270,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-biology|0": {
            "hashes": {
                "hash_examples": "f262eaf4a72db963",
                "hash_full_prompts": "39bc2a3280111377",
                "hash_input_tokens": "b02d46998eaf8258",
                "hash_cont_tokens": "6febacc6fab06b5f"
            },
            "truncated": 0,
            "non_truncated": 210,
            "padded": 840,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-chemistry|0": {
            "hashes": {
                "hash_examples": "47f2e649f58d9da5",
                "hash_full_prompts": "5c41e2e35d67a9d5",
                "hash_input_tokens": "5519544dbe246167",
                "hash_cont_tokens": "3015c398cf0221cb"
            },
            "truncated": 0,
            "non_truncated": 207,
            "padded": 830,
            "non_padded": 1,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-chinese|0": {
            "hashes": {
                "hash_examples": "1010b21fde4726ab",
                "hash_full_prompts": "9d992d731aa2a7fd",
                "hash_input_tokens": "39899c6a33c3f4d6",
                "hash_cont_tokens": "5052f44dd984c0b0"
            },
            "truncated": 0,
            "non_truncated": 246,
            "padded": 981,
            "non_padded": 3,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-english|0": {
            "hashes": {
                "hash_examples": "4864e492a350ae93",
                "hash_full_prompts": "77bba3f15a7223eb",
                "hash_input_tokens": "fc004e5ff2244bca",
                "hash_cont_tokens": "dc26c56711dabfa8"
            },
            "truncated": 0,
            "non_truncated": 306,
            "padded": 1222,
            "non_padded": 2,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-geography|0": {
            "hashes": {
                "hash_examples": "ec3a021e37650e7d",
                "hash_full_prompts": "f5e795842f196e4b",
                "hash_input_tokens": "55720d42b320b4ad",
                "hash_cont_tokens": "5e5ef262231ad3a8"
            },
            "truncated": 0,
            "non_truncated": 199,
            "padded": 794,
            "non_padded": 2,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-history|0": {
            "hashes": {
                "hash_examples": "b3fad1596f1ae1f9",
                "hash_full_prompts": "f5837cba942d029d",
                "hash_input_tokens": "7e7ae5f9574a4cfa",
                "hash_cont_tokens": "57c8595d5c602f81"
            },
            "truncated": 0,
            "non_truncated": 235,
            "padded": 935,
            "non_padded": 5,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-mathqa|0": {
            "hashes": {
                "hash_examples": "1d1088556861b0b0",
                "hash_full_prompts": "d5c6926ec83538c2",
                "hash_input_tokens": "28a87c786fb08707",
                "hash_cont_tokens": "50cebf57e84fdfaf"
            },
            "truncated": 0,
            "non_truncated": 351,
            "padded": 1401,
            "non_padded": 3,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:gaokao-physics|0": {
            "hashes": {
                "hash_examples": "eb05f035c7bfca2f",
                "hash_full_prompts": "9f49be637d41f4e6",
                "hash_input_tokens": "80066782278c4af3",
                "hash_cont_tokens": "f955a15380a0526a"
            },
            "truncated": 0,
            "non_truncated": 200,
            "padded": 797,
            "non_padded": 3,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:logiqa-en|0": {
            "hashes": {
                "hash_examples": "0a688a45f69c21e0",
                "hash_full_prompts": "84f8c9def284d077",
                "hash_input_tokens": "5cf3557343744186",
                "hash_cont_tokens": "d002e63e849fbeda"
            },
            "truncated": 0,
            "non_truncated": 651,
            "padded": 2597,
            "non_padded": 7,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:logiqa-zh|0": {
            "hashes": {
                "hash_examples": "620d6888b6012ea5",
                "hash_full_prompts": "c23d476282290821",
                "hash_input_tokens": "010ade50475cf41d",
                "hash_cont_tokens": "de96a56505fe0ffe"
            },
            "truncated": 0,
            "non_truncated": 651,
            "padded": 2581,
            "non_padded": 23,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:lsat-ar|0": {
            "hashes": {
                "hash_examples": "627c8f5ccd5da209",
                "hash_full_prompts": "bfe2f11ca677b5c3",
                "hash_input_tokens": "965e67c4bc4980c7",
                "hash_cont_tokens": "af464389b50e459e"
            },
            "truncated": 0,
            "non_truncated": 230,
            "padded": 1140,
            "non_padded": 10,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:lsat-lr|0": {
            "hashes": {
                "hash_examples": "794641c86de172f5",
                "hash_full_prompts": "d83a7fe798a76676",
                "hash_input_tokens": "2e371c1528fc3ad6",
                "hash_cont_tokens": "5c463c8d5bce251f"
            },
            "truncated": 0,
            "non_truncated": 510,
            "padded": 2525,
            "non_padded": 25,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:lsat-rc|0": {
            "hashes": {
                "hash_examples": "35981ed917ea01cf",
                "hash_full_prompts": "25f8e67bddcb5ac5",
                "hash_input_tokens": "1f8650391b3bab7d",
                "hash_cont_tokens": "b4402bbe635c77de"
            },
            "truncated": 0,
            "non_truncated": 269,
            "padded": 1345,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:sat-en|0": {
            "hashes": {
                "hash_examples": "041c39c646536a1e",
                "hash_full_prompts": "b66cdda514c24e58",
                "hash_input_tokens": "6686af3a6a1352b0",
                "hash_cont_tokens": "5e6010f1bf063adf"
            },
            "truncated": 0,
            "non_truncated": 206,
            "padded": 821,
            "non_padded": 0,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:sat-en-without-passage|0": {
            "hashes": {
                "hash_examples": "e4d9284367dff68f",
                "hash_full_prompts": "7c1c47d333355e18",
                "hash_input_tokens": "6825d03bfea6fae9",
                "hash_cont_tokens": "5e6010f1bf063adf"
            },
            "truncated": 0,
            "non_truncated": 206,
            "padded": 817,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        },
        "lighteval|agieval:sat-math|0": {
            "hashes": {
                "hash_examples": "01db7291603fc1a0",
                "hash_full_prompts": "dcb8fb4427c060ac",
                "hash_input_tokens": "d7ef7869e71192c8",
                "hash_cont_tokens": "01937a958f6113a8"
            },
            "truncated": 0,
            "non_truncated": 220,
            "padded": 876,
            "non_padded": 4,
            "effective_few_shots": 0.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "da3af66181f18ddf",
            "hash_full_prompts": "108e627f7ba978c5",
            "hash_input_tokens": "338d25db35d727a1",
            "hash_cont_tokens": "b66107ac75b5cf3a"
        },
        "truncated": 0,
        "non_truncated": 5151,
        "padded": 21772,
        "non_padded": 92,
        "num_truncated_few_shots": 0
    }
}