{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1422585.581207942,
    "end_time": 1428994.705335767,
    "total_evaluation_time_secondes": "6409.124127825024",
    "model_name": "databricks/dbrx-base",
    "model_sha": "4e1f4c3e5452762b62b8cf3c2eee863c52da7903",
    "model_dtype": "torch.bfloat16",
    "model_size": "245.12 GB",
    "config": null
  },
  "results": {
    "lighteval|agieval:aqua-rat|0": {
      "acc": 0.2677165354330709,
      "acc_stderr": 0.02783664886644535,
      "acc_norm": 0.2440944881889764,
      "acc_norm_stderr": 0.027005516126961032
    },
    "lighteval|agieval:gaokao-biology|0": {
      "acc": 0.3523809523809524,
      "acc_stderr": 0.03304401999334817,
      "acc_norm": 0.4095238095238095,
      "acc_norm_stderr": 0.03401477718256436
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "acc": 0.3188405797101449,
      "acc_stderr": 0.03246964709878484,
      "acc_norm": 0.30917874396135264,
      "acc_norm_stderr": 0.03219986494000449
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "acc": 0.33739837398373984,
      "acc_stderr": 0.030207494084156634,
      "acc_norm": 0.33739837398373984,
      "acc_norm_stderr": 0.03020749408415664
    },
    "lighteval|agieval:gaokao-english|0": {
      "acc": 0.7222222222222222,
      "acc_stderr": 0.02564686309713791,
      "acc_norm": 0.7516339869281046,
      "acc_norm_stderr": 0.02473998135511359
    },
    "lighteval|agieval:gaokao-geography|0": {
      "acc": 0.5829145728643216,
      "acc_stderr": 0.035041474244182344,
      "acc_norm": 0.592964824120603,
      "acc_norm_stderr": 0.03491385802519052
    },
    "lighteval|agieval:gaokao-history|0": {
      "acc": 0.6468085106382979,
      "acc_stderr": 0.031245325202761926,
      "acc_norm": 0.5957446808510638,
      "acc_norm_stderr": 0.03208115750788684
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "acc": 0.2905982905982906,
      "acc_stderr": 0.024269376594479995,
      "acc_norm": 0.2934472934472934,
      "acc_norm_stderr": 0.024339032696810918
    },
    "lighteval|agieval:gaokao-physics|0": {
      "acc": 0.42,
      "acc_stderr": 0.034987434930487206,
      "acc_norm": 0.395,
      "acc_norm_stderr": 0.03465370682892271
    },
    "lighteval|agieval:logiqa-en|0": {
      "acc": 0.36251920122887865,
      "acc_stderr": 0.01885568797958507,
      "acc_norm": 0.36251920122887865,
      "acc_norm_stderr": 0.01885568797958506
    },
    "lighteval|agieval:logiqa-zh|0": {
      "acc": 0.35176651305683565,
      "acc_stderr": 0.01872993627442735,
      "acc_norm": 0.3563748079877112,
      "acc_norm_stderr": 0.01878509246182
    },
    "lighteval|agieval:lsat-ar|0": {
      "acc": 0.21739130434782608,
      "acc_stderr": 0.02725685083881996,
      "acc_norm": 0.19130434782608696,
      "acc_norm_stderr": 0.025991852462828483
    },
    "lighteval|agieval:lsat-lr|0": {
      "acc": 0.5490196078431373,
      "acc_stderr": 0.022055345548930266,
      "acc_norm": 0.43529411764705883,
      "acc_norm_stderr": 0.02197574787518572
    },
    "lighteval|agieval:lsat-rc|0": {
      "acc": 0.6877323420074349,
      "acc_stderr": 0.02830778120469435,
      "acc_norm": 0.5427509293680297,
      "acc_norm_stderr": 0.03043051529856916
    },
    "lighteval|agieval:sat-en|0": {
      "acc": 0.8009708737864077,
      "acc_stderr": 0.02788622643950503,
      "acc_norm": 0.7330097087378641,
      "acc_norm_stderr": 0.030897665523458163
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "acc": 0.5388349514563107,
      "acc_stderr": 0.03481602144131183,
      "acc_norm": 0.41262135922330095,
      "acc_norm_stderr": 0.03438412659410014
    },
    "lighteval|agieval:sat-math|0": {
      "acc": 0.4681818181818182,
      "acc_stderr": 0.03371838809107288,
      "acc_norm": 0.39090909090909093,
      "acc_norm_stderr": 0.03297288034347065
    },
    "lighteval|agieval:_average|0": {
      "acc": 0.46560568527880525,
      "acc_stderr": 0.028610265995890068,
      "acc_norm": 0.4325746919960567,
      "acc_norm_stderr": 0.02873229160509579
    },
    "all": {
      "acc": 0.46560568527880525,
      "acc_stderr": 0.028610265995890068,
      "acc_norm": 0.4325746919960567,
      "acc_norm_stderr": 0.02873229160509579
    }
  },
  "versions": {
    "lighteval|agieval:aqua-rat|0": 0,
    "lighteval|agieval:gaokao-biology|0": 0,
    "lighteval|agieval:gaokao-chemistry|0": 0,
    "lighteval|agieval:gaokao-chinese|0": 0,
    "lighteval|agieval:gaokao-english|0": 0,
    "lighteval|agieval:gaokao-geography|0": 0,
    "lighteval|agieval:gaokao-history|0": 0,
    "lighteval|agieval:gaokao-mathqa|0": 0,
    "lighteval|agieval:gaokao-physics|0": 0,
    "lighteval|agieval:logiqa-en|0": 0,
    "lighteval|agieval:logiqa-zh|0": 0,
    "lighteval|agieval:lsat-ar|0": 0,
    "lighteval|agieval:lsat-lr|0": 0,
    "lighteval|agieval:lsat-rc|0": 0,
    "lighteval|agieval:sat-en|0": 0,
    "lighteval|agieval:sat-en-without-passage|0": 0,
    "lighteval|agieval:sat-math|0": 0
  },
  "config_tasks": {
    "lighteval|agieval:aqua-rat": {
      "name": "agieval:aqua-rat",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-aqua-rat",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 254,
      "effective_num_docs": 254,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-biology": {
      "name": "agieval:gaokao-biology",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-biology",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 210,
      "effective_num_docs": 210,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chemistry": {
      "name": "agieval:gaokao-chemistry",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 207,
      "effective_num_docs": 207,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chinese": {
      "name": "agieval:gaokao-chinese",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chinese",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 246,
      "effective_num_docs": 246,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-english": {
      "name": "agieval:gaokao-english",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-english",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 306,
      "effective_num_docs": 306,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-geography": {
      "name": "agieval:gaokao-geography",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-geography",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 199,
      "effective_num_docs": 199,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-history": {
      "name": "agieval:gaokao-history",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-history",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-mathqa": {
      "name": "agieval:gaokao-mathqa",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 351,
      "effective_num_docs": 351,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-physics": {
      "name": "agieval:gaokao-physics",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-physics",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 200,
      "effective_num_docs": 200,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-en": {
      "name": "agieval:logiqa-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-zh": {
      "name": "agieval:logiqa-zh",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-zh",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-ar": {
      "name": "agieval:lsat-ar",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-ar",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 230,
      "effective_num_docs": 230,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-lr": {
      "name": "agieval:lsat-lr",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-lr",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 510,
      "effective_num_docs": 510,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-rc": {
      "name": "agieval:lsat-rc",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-rc",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 269,
      "effective_num_docs": 269,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en": {
      "name": "agieval:sat-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en-without-passage": {
      "name": "agieval:sat-en-without-passage",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-math": {
      "name": "agieval:sat-math",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-math",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 220,
      "effective_num_docs": 220,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|agieval:aqua-rat|0": {
      "hashes": {
        "hash_examples": "f09607f69e5b7525",
        "hash_full_prompts": "f09607f69e5b7525",
        "hash_input_tokens": "5e845ed6a5a59a8b",
        "hash_cont_tokens": "13c4460922661f6d"
      },
      "truncated": 0,
      "non_truncated": 254,
      "padded": 1270,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-biology|0": {
      "hashes": {
        "hash_examples": "f262eaf4a72db963",
        "hash_full_prompts": "f262eaf4a72db963",
        "hash_input_tokens": "15e32c0bb9018595",
        "hash_cont_tokens": "f7ca0af9c4bb24f3"
      },
      "truncated": 0,
      "non_truncated": 210,
      "padded": 840,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "hashes": {
        "hash_examples": "47f2e649f58d9da5",
        "hash_full_prompts": "47f2e649f58d9da5",
        "hash_input_tokens": "79e0c48ebebf97e6",
        "hash_cont_tokens": "fabcd867b980328e"
      },
      "truncated": 0,
      "non_truncated": 207,
      "padded": 831,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "hashes": {
        "hash_examples": "1010b21fde4726ab",
        "hash_full_prompts": "1010b21fde4726ab",
        "hash_input_tokens": "4d94735847efad72",
        "hash_cont_tokens": "7758ed9477088621"
      },
      "truncated": 0,
      "non_truncated": 246,
      "padded": 982,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-english|0": {
      "hashes": {
        "hash_examples": "4864e492a350ae93",
        "hash_full_prompts": "4864e492a350ae93",
        "hash_input_tokens": "4a838d8e9a91fe62",
        "hash_cont_tokens": "7c77a755a025b7d3"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1218,
      "non_padded": 6,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-geography|0": {
      "hashes": {
        "hash_examples": "ec3a021e37650e7d",
        "hash_full_prompts": "ec3a021e37650e7d",
        "hash_input_tokens": "9c9a81cac8b66719",
        "hash_cont_tokens": "4ad67a82d321a083"
      },
      "truncated": 0,
      "non_truncated": 199,
      "padded": 796,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-history|0": {
      "hashes": {
        "hash_examples": "b3fad1596f1ae1f9",
        "hash_full_prompts": "b3fad1596f1ae1f9",
        "hash_input_tokens": "06e991bbc67fe0ad",
        "hash_cont_tokens": "5773d3e5394a127f"
      },
      "truncated": 0,
      "non_truncated": 235,
      "padded": 940,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "hashes": {
        "hash_examples": "1d1088556861b0b0",
        "hash_full_prompts": "1d1088556861b0b0",
        "hash_input_tokens": "cc5894dcbd8929a6",
        "hash_cont_tokens": "074ce57bdfb21a5b"
      },
      "truncated": 0,
      "non_truncated": 351,
      "padded": 1404,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-physics|0": {
      "hashes": {
        "hash_examples": "eb05f035c7bfca2f",
        "hash_full_prompts": "eb05f035c7bfca2f",
        "hash_input_tokens": "85a92d12f41170d5",
        "hash_cont_tokens": "f8182436964f9017"
      },
      "truncated": 0,
      "non_truncated": 200,
      "padded": 800,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-en|0": {
      "hashes": {
        "hash_examples": "0a688a45f69c21e0",
        "hash_full_prompts": "0a688a45f69c21e0",
        "hash_input_tokens": "aa01c09fda59aef4",
        "hash_cont_tokens": "86626d7a51d9ba03"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2599,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-zh|0": {
      "hashes": {
        "hash_examples": "620d6888b6012ea5",
        "hash_full_prompts": "620d6888b6012ea5",
        "hash_input_tokens": "1695856d2ea03235",
        "hash_cont_tokens": "5c235458a89a73e9"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2592,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-ar|0": {
      "hashes": {
        "hash_examples": "627c8f5ccd5da209",
        "hash_full_prompts": "627c8f5ccd5da209",
        "hash_input_tokens": "d8a2fef5ce117c68",
        "hash_cont_tokens": "76a773001205abb5"
      },
      "truncated": 0,
      "non_truncated": 230,
      "padded": 1147,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-lr|0": {
      "hashes": {
        "hash_examples": "794641c86de172f5",
        "hash_full_prompts": "794641c86de172f5",
        "hash_input_tokens": "e738982173c1244d",
        "hash_cont_tokens": "04ee8888f6032d56"
      },
      "truncated": 0,
      "non_truncated": 510,
      "padded": 2519,
      "non_padded": 31,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-rc|0": {
      "hashes": {
        "hash_examples": "35981ed917ea01cf",
        "hash_full_prompts": "35981ed917ea01cf",
        "hash_input_tokens": "db2fbdf3c4b6aad0",
        "hash_cont_tokens": "d07111c2a7dfec96"
      },
      "truncated": 0,
      "non_truncated": 269,
      "padded": 1345,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en|0": {
      "hashes": {
        "hash_examples": "041c39c646536a1e",
        "hash_full_prompts": "041c39c646536a1e",
        "hash_input_tokens": "7431fc1d02cbfbeb",
        "hash_cont_tokens": "9e81a3554c11cb1d"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 821,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "hashes": {
        "hash_examples": "e4d9284367dff68f",
        "hash_full_prompts": "e4d9284367dff68f",
        "hash_input_tokens": "3d8ef71036a52557",
        "hash_cont_tokens": "9e81a3554c11cb1d"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 819,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-math|0": {
      "hashes": {
        "hash_examples": "01db7291603fc1a0",
        "hash_full_prompts": "01db7291603fc1a0",
        "hash_input_tokens": "82caa2d74dee3221",
        "hash_cont_tokens": "b61900fe25c4c91d"
      },
      "truncated": 0,
      "non_truncated": 220,
      "padded": 878,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "da3af66181f18ddf",
      "hash_full_prompts": "da3af66181f18ddf",
      "hash_input_tokens": "513e4104fbe52ac3",
      "hash_cont_tokens": "0f1f5acaf948f318"
    },
    "truncated": 0,
    "non_truncated": 5151,
    "padded": 21801,
    "non_padded": 63,
    "num_truncated_few_shots": 0
  }
}