open-r1-eval-leaderboard/eval_results/HuggingFaceH4/starcoder2-15b-dpo/v4.1/agieval/results_2024-03-28T16-48-35.789845.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1032334.39355707,
    "end_time": 1032764.678856595,
    "total_evaluation_time_secondes": "430.2852995248977",
    "model_name": "HuggingFaceH4/starcoder2-15b-dpo",
    "model_sha": "3fea85086795d1c7043849eb550e631fbf8426f4",
    "model_dtype": "torch.bfloat16",
    "model_size": "30.04 GB",
    "config": null
  },
  "results": {
    "lighteval|agieval:aqua-rat|0": {
      "acc": 0.3031496062992126,
      "acc_stderr": 0.02889601929036988,
      "acc_norm": 0.2952755905511811,
      "acc_norm_stderr": 0.028678944926860864
    },
    "lighteval|agieval:gaokao-biology|0": {
      "acc": 0.3476190476190476,
      "acc_stderr": 0.032940430891650836,
      "acc_norm": 0.35714285714285715,
      "acc_norm_stderr": 0.033144012047664914
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "acc": 0.30917874396135264,
      "acc_stderr": 0.032199864940004486,
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.03284430927214311
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "acc": 0.4105691056910569,
      "acc_stderr": 0.03142870877522598,
      "acc_norm": 0.4024390243902439,
      "acc_norm_stderr": 0.03132983555771989
    },
    "lighteval|agieval:gaokao-english|0": {
      "acc": 0.6797385620915033,
      "acc_stderr": 0.026716118380156847,
      "acc_norm": 0.6339869281045751,
      "acc_norm_stderr": 0.027582811415159607
    },
    "lighteval|agieval:gaokao-geography|0": {
      "acc": 0.4221105527638191,
      "acc_stderr": 0.035099658550329385,
      "acc_norm": 0.4321608040201005,
      "acc_norm_stderr": 0.03520487250258454
    },
    "lighteval|agieval:gaokao-history|0": {
      "acc": 0.4595744680851064,
      "acc_stderr": 0.03257901482099834,
      "acc_norm": 0.43829787234042555,
      "acc_norm_stderr": 0.032436186361081004
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "acc": 0.28774928774928776,
      "acc_stderr": 0.024198561654366728,
      "acc_norm": 0.28774928774928776,
      "acc_norm_stderr": 0.024198561654366735
    },
    "lighteval|agieval:gaokao-physics|0": {
      "acc": 0.335,
      "acc_stderr": 0.033458517029435815,
      "acc_norm": 0.355,
      "acc_norm_stderr": 0.033920910080708536
    },
    "lighteval|agieval:logiqa-en|0": {
      "acc": 0.3425499231950845,
      "acc_stderr": 0.01861386882920801,
      "acc_norm": 0.36251920122887865,
      "acc_norm_stderr": 0.01885568797958507
    },
    "lighteval|agieval:logiqa-zh|0": {
      "acc": 0.36098310291858676,
      "acc_stderr": 0.018838352954538687,
      "acc_norm": 0.37019969278033793,
      "acc_norm_stderr": 0.01893925025913045
    },
    "lighteval|agieval:lsat-ar|0": {
      "acc": 0.18695652173913044,
      "acc_stderr": 0.025763772398512325,
      "acc_norm": 0.16521739130434782,
      "acc_norm_stderr": 0.02454125880854541
    },
    "lighteval|agieval:lsat-lr|0": {
      "acc": 0.4117647058823529,
      "acc_stderr": 0.021814296283441937,
      "acc_norm": 0.3686274509803922,
      "acc_norm_stderr": 0.021383450873181317
    },
    "lighteval|agieval:lsat-rc|0": {
      "acc": 0.483271375464684,
      "acc_stderr": 0.030525261933744594,
      "acc_norm": 0.3940520446096654,
      "acc_norm_stderr": 0.029848812493479995
    },
    "lighteval|agieval:sat-en|0": {
      "acc": 0.6553398058252428,
      "acc_stderr": 0.0331934128585908,
      "acc_norm": 0.5631067961165048,
      "acc_norm_stderr": 0.034642250552412775
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "acc": 0.42718446601941745,
      "acc_stderr": 0.03454921537431906,
      "acc_norm": 0.32524271844660196,
      "acc_norm_stderr": 0.0327190473759639
    },
    "lighteval|agieval:sat-math|0": {
      "acc": 0.33636363636363636,
      "acc_stderr": 0.03192622349349311,
      "acc_norm": 0.2909090909090909,
      "acc_norm_stderr": 0.030690753276711096
    },
    "lighteval|agieval:_average|0": {
      "acc": 0.3975942889216777,
      "acc_stderr": 0.02898478226225805,
      "acc_norm": 0.37501529905928377,
      "acc_norm_stderr": 0.02888005620219407
    },
    "all": {
      "acc": 0.3975942889216777,
      "acc_stderr": 0.02898478226225805,
      "acc_norm": 0.37501529905928377,
      "acc_norm_stderr": 0.02888005620219407
    }
  },
  "versions": {
    "lighteval|agieval:aqua-rat|0": 0,
    "lighteval|agieval:gaokao-biology|0": 0,
    "lighteval|agieval:gaokao-chemistry|0": 0,
    "lighteval|agieval:gaokao-chinese|0": 0,
    "lighteval|agieval:gaokao-english|0": 0,
    "lighteval|agieval:gaokao-geography|0": 0,
    "lighteval|agieval:gaokao-history|0": 0,
    "lighteval|agieval:gaokao-mathqa|0": 0,
    "lighteval|agieval:gaokao-physics|0": 0,
    "lighteval|agieval:logiqa-en|0": 0,
    "lighteval|agieval:logiqa-zh|0": 0,
    "lighteval|agieval:lsat-ar|0": 0,
    "lighteval|agieval:lsat-lr|0": 0,
    "lighteval|agieval:lsat-rc|0": 0,
    "lighteval|agieval:sat-en|0": 0,
    "lighteval|agieval:sat-en-without-passage|0": 0,
    "lighteval|agieval:sat-math|0": 0
  },
  "config_tasks": {
    "lighteval|agieval:aqua-rat": {
      "name": "agieval:aqua-rat",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-aqua-rat",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 254,
      "effective_num_docs": 254,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-biology": {
      "name": "agieval:gaokao-biology",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-biology",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 210,
      "effective_num_docs": 210,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chemistry": {
      "name": "agieval:gaokao-chemistry",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 207,
      "effective_num_docs": 207,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chinese": {
      "name": "agieval:gaokao-chinese",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chinese",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 246,
      "effective_num_docs": 246,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-english": {
      "name": "agieval:gaokao-english",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-english",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 306,
      "effective_num_docs": 306,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-geography": {
      "name": "agieval:gaokao-geography",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-geography",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 199,
      "effective_num_docs": 199,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-history": {
      "name": "agieval:gaokao-history",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-history",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-mathqa": {
      "name": "agieval:gaokao-mathqa",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 351,
      "effective_num_docs": 351,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-physics": {
      "name": "agieval:gaokao-physics",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-physics",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 200,
      "effective_num_docs": 200,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-en": {
      "name": "agieval:logiqa-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-zh": {
      "name": "agieval:logiqa-zh",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-zh",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-ar": {
      "name": "agieval:lsat-ar",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-ar",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 230,
      "effective_num_docs": 230,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-lr": {
      "name": "agieval:lsat-lr",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-lr",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 510,
      "effective_num_docs": 510,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-rc": {
      "name": "agieval:lsat-rc",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-rc",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 269,
      "effective_num_docs": 269,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en": {
      "name": "agieval:sat-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en-without-passage": {
      "name": "agieval:sat-en-without-passage",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-math": {
      "name": "agieval:sat-math",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-math",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 220,
      "effective_num_docs": 220,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|agieval:aqua-rat|0": {
      "hashes": {
        "hash_examples": "f09607f69e5b7525",
        "hash_full_prompts": "f0af1499da980246",
        "hash_input_tokens": "8baf74e20ece8648",
        "hash_cont_tokens": "9b1938f401dd8b11"
      },
      "truncated": 0,
      "non_truncated": 254,
      "padded": 1270,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-biology|0": {
      "hashes": {
        "hash_examples": "f262eaf4a72db963",
        "hash_full_prompts": "4027de4b1cdd1c67",
        "hash_input_tokens": "39dc1edf1aee0fbb",
        "hash_cont_tokens": "688fa7bad594603f"
      },
      "truncated": 0,
      "non_truncated": 210,
      "padded": 840,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "hashes": {
        "hash_examples": "47f2e649f58d9da5",
        "hash_full_prompts": "39de31ab927f9675",
        "hash_input_tokens": "20e47fcf97b7770e",
        "hash_cont_tokens": "b1270b8e647495ca"
      },
      "truncated": 0,
      "non_truncated": 207,
      "padded": 830,
      "non_padded": 1,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "hashes": {
        "hash_examples": "1010b21fde4726ab",
        "hash_full_prompts": "3b4313f1bd85fd2e",
        "hash_input_tokens": "3898b0f0f090f3d0",
        "hash_cont_tokens": "d3b9d7bfcbb19698"
      },
      "truncated": 0,
      "non_truncated": 246,
      "padded": 982,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-english|0": {
      "hashes": {
        "hash_examples": "4864e492a350ae93",
        "hash_full_prompts": "547863254a606496",
        "hash_input_tokens": "239abc88ee71e803",
        "hash_cont_tokens": "cfc7630481f785d5"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1224,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-geography|0": {
      "hashes": {
        "hash_examples": "ec3a021e37650e7d",
        "hash_full_prompts": "0a7cffbf555ab29e",
        "hash_input_tokens": "b4c7b4b03d284285",
        "hash_cont_tokens": "720e9ef67de27447"
      },
      "truncated": 0,
      "non_truncated": 199,
      "padded": 793,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-history|0": {
      "hashes": {
        "hash_examples": "b3fad1596f1ae1f9",
        "hash_full_prompts": "b8aca4146c3435af",
        "hash_input_tokens": "9789eb63f352f670",
        "hash_cont_tokens": "e06f636fede16bab"
      },
      "truncated": 0,
      "non_truncated": 235,
      "padded": 928,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "hashes": {
        "hash_examples": "1d1088556861b0b0",
        "hash_full_prompts": "1441e196c635c040",
        "hash_input_tokens": "4c7d3d8c766b4133",
        "hash_cont_tokens": "9a043c4851ecba43"
      },
      "truncated": 0,
      "non_truncated": 351,
      "padded": 1401,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-physics|0": {
      "hashes": {
        "hash_examples": "eb05f035c7bfca2f",
        "hash_full_prompts": "be15722274b1466d",
        "hash_input_tokens": "cad736440c03b505",
        "hash_cont_tokens": "1c5e032af2b83252"
      },
      "truncated": 0,
      "non_truncated": 200,
      "padded": 795,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-en|0": {
      "hashes": {
        "hash_examples": "0a688a45f69c21e0",
        "hash_full_prompts": "ca179e67bdc726a6",
        "hash_input_tokens": "1fbf31a77a24b094",
        "hash_cont_tokens": "a5d0dff7311f907e"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2588,
      "non_padded": 16,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-zh|0": {
      "hashes": {
        "hash_examples": "620d6888b6012ea5",
        "hash_full_prompts": "55e305ed89c6e580",
        "hash_input_tokens": "de67a5e6f4908ede",
        "hash_cont_tokens": "c3a71ac47a57ec15"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2585,
      "non_padded": 19,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-ar|0": {
      "hashes": {
        "hash_examples": "627c8f5ccd5da209",
        "hash_full_prompts": "59e010e22954d5b7",
        "hash_input_tokens": "859b7791e0b3096c",
        "hash_cont_tokens": "1c426a8ed2791d40"
      },
      "truncated": 0,
      "non_truncated": 230,
      "padded": 1150,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-lr|0": {
      "hashes": {
        "hash_examples": "794641c86de172f5",
        "hash_full_prompts": "efc3c1a3a1586d3e",
        "hash_input_tokens": "9b0cfa412091f4be",
        "hash_cont_tokens": "0dbb27d96287a25f"
      },
      "truncated": 0,
      "non_truncated": 510,
      "padded": 2526,
      "non_padded": 24,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-rc|0": {
      "hashes": {
        "hash_examples": "35981ed917ea01cf",
        "hash_full_prompts": "b80e2b86e1eb0cea",
        "hash_input_tokens": "481cbc8831175348",
        "hash_cont_tokens": "da450aec35e6a43a"
      },
      "truncated": 0,
      "non_truncated": 269,
      "padded": 1345,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en|0": {
      "hashes": {
        "hash_examples": "041c39c646536a1e",
        "hash_full_prompts": "4eb610121b313521",
        "hash_input_tokens": "500851b7c0e4f612",
        "hash_cont_tokens": "79a85abe49b36aab"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 821,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "hashes": {
        "hash_examples": "e4d9284367dff68f",
        "hash_full_prompts": "532ea18906ff2f4e",
        "hash_input_tokens": "1592fd5eff06e0b0",
        "hash_cont_tokens": "79a85abe49b36aab"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 816,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-math|0": {
      "hashes": {
        "hash_examples": "01db7291603fc1a0",
        "hash_full_prompts": "1422fad2e0cca51f",
        "hash_input_tokens": "1af07c78239d2621",
        "hash_cont_tokens": "aee1d0a52fed5f88"
      },
      "truncated": 0,
      "non_truncated": 220,
      "padded": 873,
      "non_padded": 7,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "da3af66181f18ddf",
      "hash_full_prompts": "e89209d4ce68d63a",
      "hash_input_tokens": "5bfe88e83744723c",
      "hash_cont_tokens": "2c3ea9683e04c17d"
    },
    "truncated": 0,
    "non_truncated": 5151,
    "padded": 21767,
    "non_padded": 97,
    "num_truncated_few_shots": 0
  }
} |