open-r1-eval-leaderboard/eval_results/HuggingFaceH4/zephyr-7b-gemma-v0.1/main/agieval/results_2024-03-28T16-40-43.592094.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 549406.183073153,
    "end_time": 549696.457783003,
    "total_evaluation_time_secondes": "290.2747098499676",
    "model_name": "HuggingFaceH4/zephyr-7b-gemma-v0.1",
    "model_sha": "03b3427d0ed07d2e0f86c0a7e53d82d4beef9540",
    "model_dtype": "torch.bfloat16",
    "model_size": "15.9 GB",
    "config": null
  },
  "results": {
    "lighteval|agieval:aqua-rat|0": {
      "acc": 0.2125984251968504,
      "acc_stderr": 0.025722779833723054,
      "acc_norm": 0.2125984251968504,
      "acc_norm_stderr": 0.02572277983372305
    },
    "lighteval|agieval:gaokao-biology|0": {
      "acc": 0.35714285714285715,
      "acc_stderr": 0.033144012047664914,
      "acc_norm": 0.38095238095238093,
      "acc_norm_stderr": 0.03359110046749989
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "acc": 0.26570048309178745,
      "acc_stderr": 0.030775079470103068,
      "acc_norm": 0.2946859903381642,
      "acc_norm_stderr": 0.03176416108295297
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "acc": 0.2967479674796748,
      "acc_stderr": 0.029185445861037912,
      "acc_norm": 0.3008130081300813,
      "acc_norm_stderr": 0.02929961637067325
    },
    "lighteval|agieval:gaokao-english|0": {
      "acc": 0.6111111111111112,
      "acc_stderr": 0.027914055510468008,
      "acc_norm": 0.5620915032679739,
      "acc_norm_stderr": 0.028408302020332687
    },
    "lighteval|agieval:gaokao-geography|0": {
      "acc": 0.46733668341708545,
      "acc_stderr": 0.03545755092964412,
      "acc_norm": 0.457286432160804,
      "acc_norm_stderr": 0.035403557368657
    },
    "lighteval|agieval:gaokao-history|0": {
      "acc": 0.451063829787234,
      "acc_stderr": 0.032529096196131965,
      "acc_norm": 0.4127659574468085,
      "acc_norm_stderr": 0.03218471141400352
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "acc": 0.27635327635327633,
      "acc_stderr": 0.023903505003127223,
      "acc_norm": 0.27635327635327633,
      "acc_norm_stderr": 0.023903505003127216
    },
    "lighteval|agieval:gaokao-physics|0": {
      "acc": 0.36,
      "acc_stderr": 0.03402629784040017,
      "acc_norm": 0.37,
      "acc_norm_stderr": 0.0342250899767933
    },
    "lighteval|agieval:logiqa-en|0": {
      "acc": 0.35944700460829493,
      "acc_stderr": 0.018820809084481267,
      "acc_norm": 0.3486943164362519,
      "acc_norm_stderr": 0.018692104055797923
    },
    "lighteval|agieval:logiqa-zh|0": {
      "acc": 0.31336405529953915,
      "acc_stderr": 0.01819412517802074,
      "acc_norm": 0.36251920122887865,
      "acc_norm_stderr": 0.018855687979585072
    },
    "lighteval|agieval:lsat-ar|0": {
      "acc": 0.2217391304347826,
      "acc_stderr": 0.027451496604058913,
      "acc_norm": 0.16521739130434782,
      "acc_norm_stderr": 0.02454125880854541
    },
    "lighteval|agieval:lsat-lr|0": {
      "acc": 0.3627450980392157,
      "acc_stderr": 0.021310737393780418,
      "acc_norm": 0.3215686274509804,
      "acc_norm_stderr": 0.020702886736741092
    },
    "lighteval|agieval:lsat-rc|0": {
      "acc": 0.5055762081784386,
      "acc_stderr": 0.03054046165569704,
      "acc_norm": 0.3754646840148699,
      "acc_norm_stderr": 0.029579828435446678
    },
    "lighteval|agieval:sat-en|0": {
      "acc": 0.6747572815533981,
      "acc_stderr": 0.03271904737596389,
      "acc_norm": 0.49514563106796117,
      "acc_norm_stderr": 0.03491986890584391
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "acc": 0.46601941747572817,
      "acc_stderr": 0.03484077510347999,
      "acc_norm": 0.3446601941747573,
      "acc_norm_stderr": 0.03319341285859081
    },
    "lighteval|agieval:sat-math|0": {
      "acc": 0.42272727272727273,
      "acc_stderr": 0.03338094264093533,
      "acc_norm": 0.3409090909090909,
      "acc_norm_stderr": 0.03203095553573995
    },
    "lighteval|agieval:_average|0": {
      "acc": 0.38967235893509095,
      "acc_stderr": 0.028818601042865762,
      "acc_norm": 0.35421918296667515,
      "acc_norm_stderr": 0.028648166285532566
    },
    "all": {
      "acc": 0.38967235893509095,
      "acc_stderr": 0.028818601042865762,
      "acc_norm": 0.35421918296667515,
      "acc_norm_stderr": 0.028648166285532566
    }
  },
  "versions": {
    "lighteval|agieval:aqua-rat|0": 0,
    "lighteval|agieval:gaokao-biology|0": 0,
    "lighteval|agieval:gaokao-chemistry|0": 0,
    "lighteval|agieval:gaokao-chinese|0": 0,
    "lighteval|agieval:gaokao-english|0": 0,
    "lighteval|agieval:gaokao-geography|0": 0,
    "lighteval|agieval:gaokao-history|0": 0,
    "lighteval|agieval:gaokao-mathqa|0": 0,
    "lighteval|agieval:gaokao-physics|0": 0,
    "lighteval|agieval:logiqa-en|0": 0,
    "lighteval|agieval:logiqa-zh|0": 0,
    "lighteval|agieval:lsat-ar|0": 0,
    "lighteval|agieval:lsat-lr|0": 0,
    "lighteval|agieval:lsat-rc|0": 0,
    "lighteval|agieval:sat-en|0": 0,
    "lighteval|agieval:sat-en-without-passage|0": 0,
    "lighteval|agieval:sat-math|0": 0
  },
  "config_tasks": {
    "lighteval|agieval:aqua-rat": {
      "name": "agieval:aqua-rat",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-aqua-rat",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 254,
      "effective_num_docs": 254,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-biology": {
      "name": "agieval:gaokao-biology",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-biology",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 210,
      "effective_num_docs": 210,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chemistry": {
      "name": "agieval:gaokao-chemistry",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 207,
      "effective_num_docs": 207,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chinese": {
      "name": "agieval:gaokao-chinese",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chinese",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 246,
      "effective_num_docs": 246,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-english": {
      "name": "agieval:gaokao-english",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-english",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 306,
      "effective_num_docs": 306,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-geography": {
      "name": "agieval:gaokao-geography",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-geography",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 199,
      "effective_num_docs": 199,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-history": {
      "name": "agieval:gaokao-history",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-history",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-mathqa": {
      "name": "agieval:gaokao-mathqa",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 351,
      "effective_num_docs": 351,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-physics": {
      "name": "agieval:gaokao-physics",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-physics",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 200,
      "effective_num_docs": 200,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-en": {
      "name": "agieval:logiqa-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-zh": {
      "name": "agieval:logiqa-zh",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-zh",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-ar": {
      "name": "agieval:lsat-ar",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-ar",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 230,
      "effective_num_docs": 230,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-lr": {
      "name": "agieval:lsat-lr",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-lr",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 510,
      "effective_num_docs": 510,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-rc": {
      "name": "agieval:lsat-rc",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-rc",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 269,
      "effective_num_docs": 269,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en": {
      "name": "agieval:sat-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en-without-passage": {
      "name": "agieval:sat-en-without-passage",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-math": {
      "name": "agieval:sat-math",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-math",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 220,
      "effective_num_docs": 220,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|agieval:aqua-rat|0": {
      "hashes": {
        "hash_examples": "f09607f69e5b7525",
        "hash_full_prompts": "8ad711fb8fb77d94",
        "hash_input_tokens": "3ca91e227e1f5ee8",
        "hash_cont_tokens": "68076809549d1d3f"
      },
      "truncated": 0,
      "non_truncated": 254,
      "padded": 1270,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-biology|0": {
      "hashes": {
        "hash_examples": "f262eaf4a72db963",
        "hash_full_prompts": "33999b9b989424c2",
        "hash_input_tokens": "8bf36c825bbcbace",
        "hash_cont_tokens": "6e251426994bce31"
      },
      "truncated": 0,
      "non_truncated": 210,
      "padded": 833,
      "non_padded": 7,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "hashes": {
        "hash_examples": "47f2e649f58d9da5",
        "hash_full_prompts": "7d7d09f9eb879955",
        "hash_input_tokens": "8f96a029d955595e",
        "hash_cont_tokens": "4871f0178c1adc9e"
      },
      "truncated": 0,
      "non_truncated": 207,
      "padded": 827,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "hashes": {
        "hash_examples": "1010b21fde4726ab",
        "hash_full_prompts": "60c65321d5cc8691",
        "hash_input_tokens": "e71306cf9d3a3034",
        "hash_cont_tokens": "0be39ceaef751ad1"
      },
      "truncated": 0,
      "non_truncated": 246,
      "padded": 979,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-english|0": {
      "hashes": {
        "hash_examples": "4864e492a350ae93",
        "hash_full_prompts": "65c242dcefa907ed",
        "hash_input_tokens": "f8bfdf5178b55ada",
        "hash_cont_tokens": "689c6ea1272771f8"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1224,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-geography|0": {
      "hashes": {
        "hash_examples": "ec3a021e37650e7d",
        "hash_full_prompts": "d66698e42924982f",
        "hash_input_tokens": "4df6fd6f05dd97b2",
        "hash_cont_tokens": "fc7dd10486347853"
      },
      "truncated": 0,
      "non_truncated": 199,
      "padded": 793,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-history|0": {
      "hashes": {
        "hash_examples": "b3fad1596f1ae1f9",
        "hash_full_prompts": "b688278c6bb839a5",
        "hash_input_tokens": "bf01c4ae7bccfb50",
        "hash_cont_tokens": "94a46bd09bc7f9a7"
      },
      "truncated": 0,
      "non_truncated": 235,
      "padded": 934,
      "non_padded": 6,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "hashes": {
        "hash_examples": "1d1088556861b0b0",
        "hash_full_prompts": "a5f205e77d0a99c2",
        "hash_input_tokens": "e955f3458ac121d3",
        "hash_cont_tokens": "d19aa287b771e823"
      },
      "truncated": 0,
      "non_truncated": 351,
      "padded": 1396,
      "non_padded": 8,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-physics|0": {
      "hashes": {
        "hash_examples": "eb05f035c7bfca2f",
        "hash_full_prompts": "408cff052da9e941",
        "hash_input_tokens": "240bc1cf51689ef3",
        "hash_cont_tokens": "78f68d934030f9d6"
      },
      "truncated": 0,
      "non_truncated": 200,
      "padded": 792,
      "non_padded": 8,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-en|0": {
      "hashes": {
        "hash_examples": "0a688a45f69c21e0",
        "hash_full_prompts": "4abde5e3d4c7b3f8",
        "hash_input_tokens": "b49fe92bef71b0db",
        "hash_cont_tokens": "78b51c7dda5a457a"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2592,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-zh|0": {
      "hashes": {
        "hash_examples": "620d6888b6012ea5",
        "hash_full_prompts": "dc4ddbb9c38aef67",
        "hash_input_tokens": "2eef4f372fdad7b7",
        "hash_cont_tokens": "33ab0a1feb3a2fed"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2588,
      "non_padded": 16,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-ar|0": {
      "hashes": {
        "hash_examples": "627c8f5ccd5da209",
        "hash_full_prompts": "85f4e2da30c42407",
        "hash_input_tokens": "b8ef0c74c45ff792",
        "hash_cont_tokens": "5a4d3fed21889b2c"
      },
      "truncated": 0,
      "non_truncated": 230,
      "padded": 1145,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-lr|0": {
      "hashes": {
        "hash_examples": "794641c86de172f5",
        "hash_full_prompts": "833dd104fba3a50a",
        "hash_input_tokens": "8bcb7119e593166c",
        "hash_cont_tokens": "449ff43e1d759ce9"
      },
      "truncated": 0,
      "non_truncated": 510,
      "padded": 2537,
      "non_padded": 13,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-rc|0": {
      "hashes": {
        "hash_examples": "35981ed917ea01cf",
        "hash_full_prompts": "1f6ee9e7b383dee6",
        "hash_input_tokens": "0e5dcc5d9ba3acb4",
        "hash_cont_tokens": "3c2d8acf3e02c384"
      },
      "truncated": 0,
      "non_truncated": 269,
      "padded": 1345,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en|0": {
      "hashes": {
        "hash_examples": "041c39c646536a1e",
        "hash_full_prompts": "dc367ba3deb69c80",
        "hash_input_tokens": "8f6bcc50dd80c45f",
        "hash_cont_tokens": "a6f1ab815d02f06d"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 821,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "hashes": {
        "hash_examples": "e4d9284367dff68f",
        "hash_full_prompts": "dc84a50c7cf1b0da",
        "hash_input_tokens": "c63a20ff70e10200",
        "hash_cont_tokens": "a6f1ab815d02f06d"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 811,
      "non_padded": 10,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-math|0": {
      "hashes": {
        "hash_examples": "01db7291603fc1a0",
        "hash_full_prompts": "fd1ffdeea790b637",
        "hash_input_tokens": "7d57211e46d19552",
        "hash_cont_tokens": "51a49f5633293d60"
      },
      "truncated": 0,
      "non_truncated": 220,
      "padded": 875,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "da3af66181f18ddf",
      "hash_full_prompts": "6cd70cd84b2d90cb",
      "hash_input_tokens": "2c432ff3076aeb5a",
      "hash_cont_tokens": "a3082c4ed8f63f0b"
    },
    "truncated": 0,
    "non_truncated": 5151,
    "padded": 21762,
    "non_padded": 102,
    "num_truncated_few_shots": 0
  }
} |