{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 906482.381460705,
    "end_time": 907202.984284406,
    "total_evaluation_time_secondes": "720.6028237009887",
    "model_name": "stabilityai/stablelm-zephyr-3b",
    "model_sha": "8b471c751c0e78cb46cf9f47738dd0eb45392071",
    "model_dtype": "torch.bfloat16",
    "model_size": "5.22 GB",
    "config": null
  },
  "results": {
    "lighteval|mmlu:abstract_algebra|5": {
      "acc": 0.3,
      "acc_stderr": 0.046056618647183814
    },
    "lighteval|mmlu:anatomy|5": {
      "acc": 0.4,
      "acc_stderr": 0.04232073695151589
    },
    "lighteval|mmlu:astronomy|5": {
      "acc": 0.4276315789473684,
      "acc_stderr": 0.04026097083296559
    },
    "lighteval|mmlu:business_ethics|5": {
      "acc": 0.5,
      "acc_stderr": 0.050251890762960605
    },
    "lighteval|mmlu:clinical_knowledge|5": {
      "acc": 0.5056603773584906,
      "acc_stderr": 0.030770900763851302
    },
    "lighteval|mmlu:college_biology|5": {
      "acc": 0.3680555555555556,
      "acc_stderr": 0.040329990539607195
    },
    "lighteval|mmlu:college_chemistry|5": {
      "acc": 0.34,
      "acc_stderr": 0.04760952285695235
    },
    "lighteval|mmlu:college_computer_science|5": {
      "acc": 0.43,
      "acc_stderr": 0.049756985195624284
    },
    "lighteval|mmlu:college_mathematics|5": {
      "acc": 0.33,
      "acc_stderr": 0.047258156262526045
    },
    "lighteval|mmlu:college_medicine|5": {
      "acc": 0.4508670520231214,
      "acc_stderr": 0.037940126746970296
    },
    "lighteval|mmlu:college_physics|5": {
      "acc": 0.22549019607843138,
      "acc_stderr": 0.041583075330832865
    },
    "lighteval|mmlu:computer_security|5": {
      "acc": 0.55,
      "acc_stderr": 0.04999999999999999
    },
    "lighteval|mmlu:conceptual_physics|5": {
      "acc": 0.33191489361702126,
      "acc_stderr": 0.03078373675774566
    },
    "lighteval|mmlu:econometrics|5": {
      "acc": 0.3157894736842105,
      "acc_stderr": 0.04372748290278008
    },
    "lighteval|mmlu:electrical_engineering|5": {
      "acc": 0.41379310344827586,
      "acc_stderr": 0.04104269211806232
    },
    "lighteval|mmlu:elementary_mathematics|5": {
      "acc": 0.3386243386243386,
      "acc_stderr": 0.024373197867983056
    },
    "lighteval|mmlu:formal_logic|5": {
      "acc": 0.25396825396825395,
      "acc_stderr": 0.03893259610604676
    },
    "lighteval|mmlu:global_facts|5": {
      "acc": 0.37,
      "acc_stderr": 0.04852365870939099
    },
    "lighteval|mmlu:high_school_biology|5": {
      "acc": 0.5709677419354838,
      "acc_stderr": 0.028156036538233193
    },
    "lighteval|mmlu:high_school_chemistry|5": {
      "acc": 0.3054187192118227,
      "acc_stderr": 0.032406615658684086
    },
    "lighteval|mmlu:high_school_computer_science|5": {
      "acc": 0.47,
      "acc_stderr": 0.05016135580465919
    },
    "lighteval|mmlu:high_school_european_history|5": {
      "acc": 0.5575757575757576,
      "acc_stderr": 0.03878372113711274
    },
    "lighteval|mmlu:high_school_geography|5": {
      "acc": 0.5707070707070707,
      "acc_stderr": 0.035265527246011986
    },
    "lighteval|mmlu:high_school_government_and_politics|5": {
      "acc": 0.5751295336787565,
      "acc_stderr": 0.035674713352125395
    },
    "lighteval|mmlu:high_school_macroeconomics|5": {
      "acc": 0.37948717948717947,
      "acc_stderr": 0.024603626924097406
    },
    "lighteval|mmlu:high_school_mathematics|5": {
      "acc": 0.24444444444444444,
      "acc_stderr": 0.026202766534652148
    },
    "lighteval|mmlu:high_school_microeconomics|5": {
      "acc": 0.38235294117647056,
      "acc_stderr": 0.03156663099215416
    },
    "lighteval|mmlu:high_school_physics|5": {
      "acc": 0.2847682119205298,
      "acc_stderr": 0.03684881521389023
    },
    "lighteval|mmlu:high_school_psychology|5": {
      "acc": 0.6311926605504588,
      "acc_stderr": 0.02068622756072956
    },
    "lighteval|mmlu:high_school_statistics|5": {
      "acc": 0.3101851851851852,
      "acc_stderr": 0.03154696285656629
    },
    "lighteval|mmlu:high_school_us_history|5": {
      "acc": 0.5882352941176471,
      "acc_stderr": 0.03454236585380609
    },
    "lighteval|mmlu:high_school_world_history|5": {
      "acc": 0.620253164556962,
      "acc_stderr": 0.031591887529658504
    },
    "lighteval|mmlu:human_aging|5": {
      "acc": 0.49327354260089684,
      "acc_stderr": 0.033554765962343545
    },
    "lighteval|mmlu:human_sexuality|5": {
      "acc": 0.48854961832061067,
      "acc_stderr": 0.043841400240780176
    },
    "lighteval|mmlu:international_law|5": {
      "acc": 0.5206611570247934,
      "acc_stderr": 0.04560456086387235
    },
    "lighteval|mmlu:jurisprudence|5": {
      "acc": 0.5185185185185185,
      "acc_stderr": 0.04830366024635331
    },
    "lighteval|mmlu:logical_fallacies|5": {
      "acc": 0.4785276073619632,
      "acc_stderr": 0.0392474687675113
    },
    "lighteval|mmlu:machine_learning|5": {
      "acc": 0.36607142857142855,
      "acc_stderr": 0.0457237235873743
    },
    "lighteval|mmlu:management|5": {
      "acc": 0.6504854368932039,
      "acc_stderr": 0.04721188506097172
    },
    "lighteval|mmlu:marketing|5": {
      "acc": 0.6581196581196581,
      "acc_stderr": 0.031075028526507762
    },
    "lighteval|mmlu:medical_genetics|5": {
      "acc": 0.55,
      "acc_stderr": 0.04999999999999999
    },
    "lighteval|mmlu:miscellaneous|5": {
      "acc": 0.6028097062579821,
      "acc_stderr": 0.017497905037159363
    },
    "lighteval|mmlu:moral_disputes|5": {
      "acc": 0.4682080924855491,
      "acc_stderr": 0.026864624366756646
    },
    "lighteval|mmlu:moral_scenarios|5": {
      "acc": 0.25139664804469275,
      "acc_stderr": 0.014508979453553977
    },
    "lighteval|mmlu:nutrition|5": {
      "acc": 0.45098039215686275,
      "acc_stderr": 0.028491993586171563
    },
    "lighteval|mmlu:philosophy|5": {
      "acc": 0.4919614147909968,
      "acc_stderr": 0.028394421370984545
    },
    "lighteval|mmlu:prehistory|5": {
      "acc": 0.5061728395061729,
      "acc_stderr": 0.027818623962583295
    },
    "lighteval|mmlu:professional_accounting|5": {
      "acc": 0.35106382978723405,
      "acc_stderr": 0.028473501272963764
    },
    "lighteval|mmlu:professional_law|5": {
      "acc": 0.35658409387222945,
      "acc_stderr": 0.01223364298927389
    },
    "lighteval|mmlu:professional_medicine|5": {
      "acc": 0.40441176470588236,
      "acc_stderr": 0.02981263070156974
    },
    "lighteval|mmlu:professional_psychology|5": {
      "acc": 0.4542483660130719,
      "acc_stderr": 0.020142974553795198
    },
    "lighteval|mmlu:public_relations|5": {
      "acc": 0.5454545454545454,
      "acc_stderr": 0.04769300568972745
    },
    "lighteval|mmlu:security_studies|5": {
      "acc": 0.47346938775510206,
      "acc_stderr": 0.03196412734523272
    },
    "lighteval|mmlu:sociology|5": {
      "acc": 0.572139303482587,
      "acc_stderr": 0.03498541988407795
    },
    "lighteval|mmlu:us_foreign_policy|5": {
      "acc": 0.71,
      "acc_stderr": 0.045604802157206845
    },
    "lighteval|mmlu:virology|5": {
      "acc": 0.42771084337349397,
      "acc_stderr": 0.038515976837185335
    },
    "lighteval|mmlu:world_religions|5": {
      "acc": 0.6198830409356725,
      "acc_stderr": 0.03722965741385539
    },
    "lighteval|mmlu:_average|5": {
      "acc": 0.4518107712962454,
      "acc_stderr": 0.03621674337602092
    }
  },
  "versions": {
    "lighteval|mmlu:abstract_algebra|5": 0,
    "lighteval|mmlu:anatomy|5": 0,
    "lighteval|mmlu:astronomy|5": 0,
    "lighteval|mmlu:business_ethics|5": 0,
    "lighteval|mmlu:clinical_knowledge|5": 0,
    "lighteval|mmlu:college_biology|5": 0,
    "lighteval|mmlu:college_chemistry|5": 0,
    "lighteval|mmlu:college_computer_science|5": 0,
    "lighteval|mmlu:college_mathematics|5": 0,
    "lighteval|mmlu:college_medicine|5": 0,
    "lighteval|mmlu:college_physics|5": 0,
    "lighteval|mmlu:computer_security|5": 0,
    "lighteval|mmlu:conceptual_physics|5": 0,
    "lighteval|mmlu:econometrics|5": 0,
    "lighteval|mmlu:electrical_engineering|5": 0,
    "lighteval|mmlu:elementary_mathematics|5": 0,
    "lighteval|mmlu:formal_logic|5": 0,
    "lighteval|mmlu:global_facts|5": 0,
    "lighteval|mmlu:high_school_biology|5": 0,
    "lighteval|mmlu:high_school_chemistry|5": 0,
    "lighteval|mmlu:high_school_computer_science|5": 0,
    "lighteval|mmlu:high_school_european_history|5": 0,
    "lighteval|mmlu:high_school_geography|5": 0,
    "lighteval|mmlu:high_school_government_and_politics|5": 0,
    "lighteval|mmlu:high_school_macroeconomics|5": 0,
    "lighteval|mmlu:high_school_mathematics|5": 0,
    "lighteval|mmlu:high_school_microeconomics|5": 0,
    "lighteval|mmlu:high_school_physics|5": 0,
    "lighteval|mmlu:high_school_psychology|5": 0,
    "lighteval|mmlu:high_school_statistics|5": 0,
    "lighteval|mmlu:high_school_us_history|5": 0,
    "lighteval|mmlu:high_school_world_history|5": 0,
    "lighteval|mmlu:human_aging|5": 0,
    "lighteval|mmlu:human_sexuality|5": 0,
    "lighteval|mmlu:international_law|5": 0,
    "lighteval|mmlu:jurisprudence|5": 0,
    "lighteval|mmlu:logical_fallacies|5": 0,
    "lighteval|mmlu:machine_learning|5": 0,
    "lighteval|mmlu:management|5": 0,
    "lighteval|mmlu:marketing|5": 0,
    "lighteval|mmlu:medical_genetics|5": 0,
    "lighteval|mmlu:miscellaneous|5": 0,
    "lighteval|mmlu:moral_disputes|5": 0,
    "lighteval|mmlu:moral_scenarios|5": 0,
    "lighteval|mmlu:nutrition|5": 0,
    "lighteval|mmlu:philosophy|5": 0,
    "lighteval|mmlu:prehistory|5": 0,
    "lighteval|mmlu:professional_accounting|5": 0,
    "lighteval|mmlu:professional_law|5": 0,
    "lighteval|mmlu:professional_medicine|5": 0,
    "lighteval|mmlu:professional_psychology|5": 0,
    "lighteval|mmlu:public_relations|5": 0,
    "lighteval|mmlu:security_studies|5": 0,
    "lighteval|mmlu:sociology|5": 0,
    "lighteval|mmlu:us_foreign_policy|5": 0,
    "lighteval|mmlu:virology|5": 0,
    "lighteval|mmlu:world_religions|5": 0
  },
  "config_tasks": {
    "lighteval|mmlu:abstract_algebra": {
      "name": "mmlu:abstract_algebra",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "abstract_algebra",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:anatomy": {
      "name": "mmlu:anatomy",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "anatomy",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 135,
      "effective_num_docs": 135
    },
    "lighteval|mmlu:astronomy": {
      "name": "mmlu:astronomy",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "astronomy",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 152,
      "effective_num_docs": 152
    },
    "lighteval|mmlu:business_ethics": {
      "name": "mmlu:business_ethics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "business_ethics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:clinical_knowledge": {
      "name": "mmlu:clinical_knowledge",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "clinical_knowledge",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 265,
      "effective_num_docs": 265
    },
    "lighteval|mmlu:college_biology": {
      "name": "mmlu:college_biology",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_biology",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 144,
      "effective_num_docs": 144
    },
    "lighteval|mmlu:college_chemistry": {
      "name": "mmlu:college_chemistry",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_chemistry",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:college_computer_science": {
      "name": "mmlu:college_computer_science",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_computer_science",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:college_mathematics": {
      "name": "mmlu:college_mathematics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_mathematics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:college_medicine": {
      "name": "mmlu:college_medicine",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_medicine",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 173,
      "effective_num_docs": 173
    },
    "lighteval|mmlu:college_physics": {
      "name": "mmlu:college_physics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "college_physics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 102,
      "effective_num_docs": 102
    },
    "lighteval|mmlu:computer_security": {
      "name": "mmlu:computer_security",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "computer_security",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:conceptual_physics": {
      "name": "mmlu:conceptual_physics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "conceptual_physics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235
    },
    "lighteval|mmlu:econometrics": {
      "name": "mmlu:econometrics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "econometrics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 114,
      "effective_num_docs": 114
    },
    "lighteval|mmlu:electrical_engineering": {
      "name": "mmlu:electrical_engineering",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "electrical_engineering",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 145,
      "effective_num_docs": 145
    },
    "lighteval|mmlu:elementary_mathematics": {
      "name": "mmlu:elementary_mathematics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "elementary_mathematics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 378,
      "effective_num_docs": 378
    },
    "lighteval|mmlu:formal_logic": {
      "name": "mmlu:formal_logic",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "formal_logic",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 126,
      "effective_num_docs": 126
    },
    "lighteval|mmlu:global_facts": {
      "name": "mmlu:global_facts",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "global_facts",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:high_school_biology": {
      "name": "mmlu:high_school_biology",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_biology",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 310,
      "effective_num_docs": 310
    },
    "lighteval|mmlu:high_school_chemistry": {
      "name": "mmlu:high_school_chemistry",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_chemistry",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 203,
      "effective_num_docs": 203
    },
    "lighteval|mmlu:high_school_computer_science": {
      "name": "mmlu:high_school_computer_science",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_computer_science",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 100,
      "effective_num_docs": 100
    },
    "lighteval|mmlu:high_school_european_history": {
      "name": "mmlu:high_school_european_history",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_european_history",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 165,
      "effective_num_docs": 165
    },
    "lighteval|mmlu:high_school_geography": {
      "name": "mmlu:high_school_geography",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_geography",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 198,
      "effective_num_docs": 198
    },
    "lighteval|mmlu:high_school_government_and_politics": {
      "name": "mmlu:high_school_government_and_politics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_government_and_politics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 193,
      "effective_num_docs": 193
    },
    "lighteval|mmlu:high_school_macroeconomics": {
      "name": "mmlu:high_school_macroeconomics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_macroeconomics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 390,
      "effective_num_docs": 390
    },
    "lighteval|mmlu:high_school_mathematics": {
      "name": "mmlu:high_school_mathematics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_mathematics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 270,
      "effective_num_docs": 270
    },
    "lighteval|mmlu:high_school_microeconomics": {
      "name": "mmlu:high_school_microeconomics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_microeconomics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 238,
      "effective_num_docs": 238
    },
    "lighteval|mmlu:high_school_physics": {
      "name": "mmlu:high_school_physics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_physics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 151,
      "effective_num_docs": 151
    },
    "lighteval|mmlu:high_school_psychology": {
      "name": "mmlu:high_school_psychology",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_psychology",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 545,
      "effective_num_docs": 545
    },
    "lighteval|mmlu:high_school_statistics": {
      "name": "mmlu:high_school_statistics",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_statistics",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 216,
      "effective_num_docs": 216
    },
    "lighteval|mmlu:high_school_us_history": {
      "name": "mmlu:high_school_us_history",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_us_history",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 204,
      "effective_num_docs": 204
    },
    "lighteval|mmlu:high_school_world_history": {
      "name": "mmlu:high_school_world_history",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "high_school_world_history",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 237,
      "effective_num_docs": 237
    },
    "lighteval|mmlu:human_aging": {
      "name": "mmlu:human_aging",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "human_aging",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 223,
      "effective_num_docs": 223
    },
    "lighteval|mmlu:human_sexuality": {
      "name": "mmlu:human_sexuality",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "human_sexuality",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 131,
      "effective_num_docs": 131
    },
    "lighteval|mmlu:international_law": {
      "name": "mmlu:international_law",
      "prompt_function": "mmlu_harness",
      "hf_repo": "lighteval/mmlu",
      "hf_subset": "international_law",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "auxiliary_train",
        "test",
        "validation",
        "dev"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "dev",
      "few_shots_select": "sequential",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval",
        "mmlu"
      ],
      "original_num_docs": 121,
      "effective_num_docs": 121
    },
    "lighteval|mmlu:jurisprudence": {
      "name": "mmlu:jurisprudence",
      "prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108 | |
}, | |
"lighteval|mmlu:logical_fallacies": { | |
"name": "mmlu:logical_fallacies", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163 | |
}, | |
"lighteval|mmlu:machine_learning": { | |
"name": "mmlu:machine_learning", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112 | |
}, | |
"lighteval|mmlu:management": { | |
"name": "mmlu:management", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "management", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103 | |
}, | |
"lighteval|mmlu:marketing": { | |
"name": "mmlu:marketing", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "marketing", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234 | |
}, | |
"lighteval|mmlu:medical_genetics": { | |
"name": "mmlu:medical_genetics", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100 | |
}, | |
"lighteval|mmlu:miscellaneous": { | |
"name": "mmlu:miscellaneous", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 783, | |
"effective_num_docs": 783 | |
}, | |
"lighteval|mmlu:moral_disputes": { | |
"name": "mmlu:moral_disputes", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346 | |
}, | |
"lighteval|mmlu:moral_scenarios": { | |
"name": "mmlu:moral_scenarios", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895 | |
}, | |
"lighteval|mmlu:nutrition": { | |
"name": "mmlu:nutrition", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "nutrition", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306 | |
}, | |
"lighteval|mmlu:philosophy": { | |
"name": "mmlu:philosophy", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "philosophy", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311 | |
}, | |
"lighteval|mmlu:prehistory": { | |
"name": "mmlu:prehistory", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "prehistory", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324 | |
}, | |
"lighteval|mmlu:professional_accounting": { | |
"name": "mmlu:professional_accounting", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 282, | |
"effective_num_docs": 282 | |
}, | |
"lighteval|mmlu:professional_law": { | |
"name": "mmlu:professional_law", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "professional_law", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 1534, | |
"effective_num_docs": 1534 | |
}, | |
"lighteval|mmlu:professional_medicine": { | |
"name": "mmlu:professional_medicine", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272 | |
}, | |
"lighteval|mmlu:professional_psychology": { | |
"name": "mmlu:professional_psychology", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612 | |
}, | |
"lighteval|mmlu:public_relations": { | |
"name": "mmlu:public_relations", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "public_relations", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110 | |
}, | |
"lighteval|mmlu:security_studies": { | |
"name": "mmlu:security_studies", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "security_studies", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245 | |
}, | |
"lighteval|mmlu:sociology": { | |
"name": "mmlu:sociology", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "sociology", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201 | |
}, | |
"lighteval|mmlu:us_foreign_policy": { | |
"name": "mmlu:us_foreign_policy", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100 | |
}, | |
"lighteval|mmlu:virology": { | |
"name": "mmlu:virology", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "virology", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166 | |
}, | |
"lighteval|mmlu:world_religions": { | |
"name": "mmlu:world_religions", | |
"prompt_function": "mmlu_harness", | |
"hf_repo": "lighteval/mmlu", | |
"hf_subset": "world_religions", | |
"metric": [ | |
"loglikelihood_acc" | |
], | |
"hf_avail_splits": [ | |
"auxiliary_train", | |
"test", | |
"validation", | |
"dev" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "dev", | |
"few_shots_select": "sequential", | |
"generation_size": 1, | |
"stop_sequence": [ | |
"\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval", | |
"mmlu" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171 | |
} | |
}, | |
"summary_tasks": { | |
"lighteval|mmlu:abstract_algebra|5": { | |
"hashes": { | |
"hash_examples": "4c76229e00c9c0e9", | |
"hash_full_prompts": "7c9d9ba119b752af", | |
"hash_input_tokens": "75c7d54f6578e9fa", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:anatomy|5": { | |
"hashes": { | |
"hash_examples": "6a1f8104dccbd33b", | |
"hash_full_prompts": "631bdf6bc3f09d51", | |
"hash_input_tokens": "a26b05fcb6bed700", | |
"hash_cont_tokens": "decb7a6e57fe3ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 540, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:astronomy|5": { | |
"hashes": { | |
"hash_examples": "1302effa3a76ce4c", | |
"hash_full_prompts": "da4c3751e1dd6326", | |
"hash_input_tokens": "7dd0e3fceaecc21c", | |
"hash_cont_tokens": "7ff0fe95d11430f4" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 608, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:business_ethics|5": { | |
"hashes": { | |
"hash_examples": "03cb8bce5336419a", | |
"hash_full_prompts": "b3c2ede0269b5d7a", | |
"hash_input_tokens": "861925f91888e927", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:clinical_knowledge|5": { | |
"hashes": { | |
"hash_examples": "ffbb9c7b2be257f9", | |
"hash_full_prompts": "da31d1e8dea61316", | |
"hash_input_tokens": "31d9ea62f21515c4", | |
"hash_cont_tokens": "f665edac0d9be625" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1060, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_biology|5": { | |
"hashes": { | |
"hash_examples": "3ee77f176f38eb8e", | |
"hash_full_prompts": "0754a7a18e5bab3f", | |
"hash_input_tokens": "647d518c3ded39be", | |
"hash_cont_tokens": "4617991bb3d1351a" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 576, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_chemistry|5": { | |
"hashes": { | |
"hash_examples": "ce61a69c46d47aeb", | |
"hash_full_prompts": "1f84199594afc309", | |
"hash_input_tokens": "3e1ab5bc96ab8629", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_computer_science|5": { | |
"hashes": { | |
"hash_examples": "32805b52d7d5daab", | |
"hash_full_prompts": "e338460a75b194ca", | |
"hash_input_tokens": "6b8c8bf94166bfa4", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_mathematics|5": { | |
"hashes": { | |
"hash_examples": "55da1a0a0bd33722", | |
"hash_full_prompts": "f1d436b51a050ee0", | |
"hash_input_tokens": "1da67d18b741d3a2", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_medicine|5": { | |
"hashes": { | |
"hash_examples": "c33e143163049176", | |
"hash_full_prompts": "7bc815e341c46a75", | |
"hash_input_tokens": "2978db187297a1f6", | |
"hash_cont_tokens": "bd325b88c02348f1" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 692, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:college_physics|5": { | |
"hashes": { | |
"hash_examples": "ebdab1cdb7e555df", | |
"hash_full_prompts": "7418fe7c749f42ad", | |
"hash_input_tokens": "bb1414dac5a972c4", | |
"hash_cont_tokens": "e818f6ac32d68b15" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 408, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:computer_security|5": { | |
"hashes": { | |
"hash_examples": "a24fd7d08a560921", | |
"hash_full_prompts": "2998f22a584263c7", | |
"hash_input_tokens": "a62eb781cd42d3cf", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:conceptual_physics|5": { | |
"hashes": { | |
"hash_examples": "8300977a79386993", | |
"hash_full_prompts": "bf76ba09bd4d9188", | |
"hash_input_tokens": "1620d18b60f12256", | |
"hash_cont_tokens": "ed2202f924cddc4e" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 940, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:econometrics|5": { | |
"hashes": { | |
"hash_examples": "ddde36788a04a46f", | |
"hash_full_prompts": "4b5d05c7af133971", | |
"hash_input_tokens": "da7a4612c4545817", | |
"hash_cont_tokens": "792952748433f6d1" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 456, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:electrical_engineering|5": { | |
"hashes": { | |
"hash_examples": "acbc5def98c19b3f", | |
"hash_full_prompts": "435c82ba677eacce", | |
"hash_input_tokens": "55eb1d23c0b6b4ca", | |
"hash_cont_tokens": "799539fe7a7bd73d" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 580, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:elementary_mathematics|5": { | |
"hashes": { | |
"hash_examples": "146e61d07497a9bd", | |
"hash_full_prompts": "248a015237ddc9fe", | |
"hash_input_tokens": "55f29e974f92a33a", | |
"hash_cont_tokens": "bc61f0018e6517d5" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1512, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:formal_logic|5": { | |
"hashes": { | |
"hash_examples": "8635216e1909a03f", | |
"hash_full_prompts": "1ba5a2de7d2cd8d5", | |
"hash_input_tokens": "986a57cfabdef747", | |
"hash_cont_tokens": "adcc4172d72cbdf2" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 504, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:global_facts|5": { | |
"hashes": { | |
"hash_examples": "30b315aa6353ee47", | |
"hash_full_prompts": "7d3f85dd426d8345", | |
"hash_input_tokens": "ed773ce4045ae786", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_biology|5": { | |
"hashes": { | |
"hash_examples": "c9136373af2180de", | |
"hash_full_prompts": "902b7a68fa61fc1f", | |
"hash_input_tokens": "f2a49937c39dfaf2", | |
"hash_cont_tokens": "7d9d3af2627a0493" | |
}, | |
"truncated": 0, | |
"non_truncated": 310, | |
"padded": 1240, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_chemistry|5": { | |
"hashes": { | |
"hash_examples": "b0661bfa1add6404", | |
"hash_full_prompts": "6761eef4caf75655", | |
"hash_input_tokens": "8d03247e74644712", | |
"hash_cont_tokens": "e4d88f43dc166f96" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 812, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_computer_science|5": { | |
"hashes": { | |
"hash_examples": "80fc1d623a3d665f", | |
"hash_full_prompts": "82724051031d8a03", | |
"hash_input_tokens": "9e5ae7ff6e979f4f", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_european_history|5": { | |
"hashes": { | |
"hash_examples": "854da6e5af0fe1a1", | |
"hash_full_prompts": "58bb8a0e49ae7876", | |
"hash_input_tokens": "393aa37bf0b20089", | |
"hash_cont_tokens": "c6a5ba5bb82443be" | |
}, | |
"truncated": 0, | |
"non_truncated": 165, | |
"padded": 656, | |
"non_padded": 4, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_geography|5": { | |
"hashes": { | |
"hash_examples": "7dc963c7acd19ad8", | |
"hash_full_prompts": "c757bb898063b587", | |
"hash_input_tokens": "bf438d1cce6f5147", | |
"hash_cont_tokens": "082ed9c44f51978d" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 792, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_government_and_politics|5": { | |
"hashes": { | |
"hash_examples": "1f675dcdebc9758f", | |
"hash_full_prompts": "863c5dfe4a2f2e59", | |
"hash_input_tokens": "0f349c66471d1b58", | |
"hash_cont_tokens": "04251113c9b13e3e" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 772, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_macroeconomics|5": { | |
"hashes": { | |
"hash_examples": "2fb32cf2d80f0b35", | |
"hash_full_prompts": "1486cdbb04a74586", | |
"hash_input_tokens": "fce1f68346ff104c", | |
"hash_cont_tokens": "20d8b70a24e326da" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1560, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_mathematics|5": { | |
"hashes": { | |
"hash_examples": "fd6646fdb5d58a1f", | |
"hash_full_prompts": "32cc88b9a4f93484", | |
"hash_input_tokens": "aba35e5dd7b6a663", | |
"hash_cont_tokens": "e6486b2f60a4c9c5" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1080, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_microeconomics|5": { | |
"hashes": { | |
"hash_examples": "2118f21f71d87d84", | |
"hash_full_prompts": "9a04289987a587ab", | |
"hash_input_tokens": "8661afcb1196f2b2", | |
"hash_cont_tokens": "bb7f88cdb94a1d06" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 941, | |
"non_padded": 11, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_physics|5": { | |
"hashes": { | |
"hash_examples": "dc3ce06378548565", | |
"hash_full_prompts": "fe54e7903284f607", | |
"hash_input_tokens": "2bc4290f1cb0ca26", | |
"hash_cont_tokens": "06e274c7d91752f6" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 594, | |
"non_padded": 10, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_psychology|5": { | |
"hashes": { | |
"hash_examples": "c8d1d98a40e11f2f", | |
"hash_full_prompts": "60281df9dcff5e18", | |
"hash_input_tokens": "75271862f49ee687", | |
"hash_cont_tokens": "f511e526872e16c6" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2168, | |
"non_padded": 12, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_statistics|5": { | |
"hashes": { | |
"hash_examples": "666c8759b98ee4ff", | |
"hash_full_prompts": "550c19346d5b17a1", | |
"hash_input_tokens": "e1b46fdd3aabe928", | |
"hash_cont_tokens": "df3d595335ee8031" | |
}, | |
"truncated": 0, | |
"non_truncated": 216, | |
"padded": 864, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_us_history|5": { | |
"hashes": { | |
"hash_examples": "95fef1c4b7d3f81e", | |
"hash_full_prompts": "e7722fac616f2bc7", | |
"hash_input_tokens": "b107620e8f0b6017", | |
"hash_cont_tokens": "0342cc2d391d1887" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 816, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:high_school_world_history|5": { | |
"hashes": { | |
"hash_examples": "7e5085b6184b0322", | |
"hash_full_prompts": "f7d557957068f832", | |
"hash_input_tokens": "13eceb74f8b84c17", | |
"hash_cont_tokens": "6e90de2a78358a9a" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 948, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:human_aging|5": { | |
"hashes": { | |
"hash_examples": "c17333e7c7c10797", | |
"hash_full_prompts": "ae274a35d2f1e859", | |
"hash_input_tokens": "90e88566a1ccf417", | |
"hash_cont_tokens": "93b9a45a73c2421a" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 892, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:human_sexuality|5": { | |
"hashes": { | |
"hash_examples": "4edd1e9045df5e3d", | |
"hash_full_prompts": "6bac1d457966897d", | |
"hash_input_tokens": "2c4c84837b6a0f69", | |
"hash_cont_tokens": "0c951883e05658a5" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 524, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:international_law|5": { | |
"hashes": { | |
"hash_examples": "db2fa00d771a062a", | |
"hash_full_prompts": "94a6166fd48d9c04", | |
"hash_input_tokens": "fe99ed0a9f9e7c58", | |
"hash_cont_tokens": "103f337a6d09c16d" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 484, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:jurisprudence|5": { | |
"hashes": { | |
"hash_examples": "e956f86b124076fe", | |
"hash_full_prompts": "e8598afd571b4398", | |
"hash_input_tokens": "38b698d460c70f7d", | |
"hash_cont_tokens": "ef67d4557cc9d27e" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 428, | |
"non_padded": 4, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:logical_fallacies|5": { | |
"hashes": { | |
"hash_examples": "956e0e6365ab79f1", | |
"hash_full_prompts": "6e85f5248cbb1ed3", | |
"hash_input_tokens": "6178a957d3082e0b", | |
"hash_cont_tokens": "29ad98c6a265e4b0" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 648, | |
"non_padded": 4, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:machine_learning|5": { | |
"hashes": { | |
"hash_examples": "397997cc6f4d581e", | |
"hash_full_prompts": "809aa23563504bd3", | |
"hash_input_tokens": "95c00eacd07d690d", | |
"hash_cont_tokens": "e8924508f2fa3911" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 448, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:management|5": { | |
"hashes": { | |
"hash_examples": "2bcbe6f6ca63d740", | |
"hash_full_prompts": "d4773d0f25c2f740", | |
"hash_input_tokens": "8126d4fa67a3ff80", | |
"hash_cont_tokens": "4e9536f766ed1acd" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 412, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:marketing|5": { | |
"hashes": { | |
"hash_examples": "8ddb20d964a1b065", | |
"hash_full_prompts": "3a4bca3d5678a20e", | |
"hash_input_tokens": "15f615ba0a66042d", | |
"hash_cont_tokens": "842caa104fbbd83a" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 936, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:medical_genetics|5": { | |
"hashes": { | |
"hash_examples": "182a71f4763d2cea", | |
"hash_full_prompts": "1d30abac80fbc825", | |
"hash_input_tokens": "5075165edc036ef8", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:miscellaneous|5": { | |
"hashes": { | |
"hash_examples": "4c404fdbb4ca57fc", | |
"hash_full_prompts": "3c35c5a7001c1556", | |
"hash_input_tokens": "c8880b49407d0d91", | |
"hash_cont_tokens": "106e2991446612f9" | |
}, | |
"truncated": 0, | |
"non_truncated": 783, | |
"padded": 3132, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:moral_disputes|5": { | |
"hashes": { | |
"hash_examples": "60cbd2baa3fea5c9", | |
"hash_full_prompts": "b8386f82f58bdcb8", | |
"hash_input_tokens": "51262567d75bd68c", | |
"hash_cont_tokens": "5ec72693c51eb73a" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1372, | |
"non_padded": 12, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:moral_scenarios|5": { | |
"hashes": { | |
"hash_examples": "fd8b0431fbdd75ef", | |
"hash_full_prompts": "ced5816c3c19a109", | |
"hash_input_tokens": "b7b035e6652c473c", | |
"hash_cont_tokens": "e87aff3328b2b886" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3487, | |
"non_padded": 93, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:nutrition|5": { | |
"hashes": { | |
"hash_examples": "71e55e2b829b6528", | |
"hash_full_prompts": "e4811bc73fb55f93", | |
"hash_input_tokens": "34f448a7e3803e49", | |
"hash_cont_tokens": "f4976a9d8ee6e1a8" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1224, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:philosophy|5": { | |
"hashes": { | |
"hash_examples": "a6d489a8d208fa4b", | |
"hash_full_prompts": "46563a2513cad867", | |
"hash_input_tokens": "4054204011f58a70", | |
"hash_cont_tokens": "154622d856e8f999" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1244, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:prehistory|5": { | |
"hashes": { | |
"hash_examples": "6cc50f032a19acaa", | |
"hash_full_prompts": "902e10c1fd491a5e", | |
"hash_input_tokens": "5c5ae944b45be6ef", | |
"hash_cont_tokens": "4e2e5d12ba1e8e1d" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1264, | |
"non_padded": 32, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:professional_accounting|5": { | |
"hashes": { | |
"hash_examples": "50f57ab32f5f6cea", | |
"hash_full_prompts": "39508d02873ff33d", | |
"hash_input_tokens": "80c53d6f4aab7861", | |
"hash_cont_tokens": "e7386873647b9650" | |
}, | |
"truncated": 0, | |
"non_truncated": 282, | |
"padded": 1112, | |
"non_padded": 16, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:professional_law|5": { | |
"hashes": { | |
"hash_examples": "a8fdc85c64f4b215", | |
"hash_full_prompts": "b851131c935cbc94", | |
"hash_input_tokens": "a3a8685d520b83f2", | |
"hash_cont_tokens": "5d6b3b85a5450cce" | |
}, | |
"truncated": 0, | |
"non_truncated": 1534, | |
"padded": 6136, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:professional_medicine|5": { | |
"hashes": { | |
"hash_examples": "c373a28a3050a73a", | |
"hash_full_prompts": "ff7105f07ce6ec3b", | |
"hash_input_tokens": "675231d040779f7e", | |
"hash_cont_tokens": "fe60425286ba5730" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 1088, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:professional_psychology|5": { | |
"hashes": { | |
"hash_examples": "bf5254fe818356af", | |
"hash_full_prompts": "cea1b4315e71e9d1", | |
"hash_input_tokens": "e9a2215b8db6c603", | |
"hash_cont_tokens": "657f9c0e15de6ca0" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2440, | |
"non_padded": 8, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:public_relations|5": { | |
"hashes": { | |
"hash_examples": "b66d52e28e7d14e0", | |
"hash_full_prompts": "1b2b7726311215e3", | |
"hash_input_tokens": "fd73c575ca83af99", | |
"hash_cont_tokens": "e4c5fcac709f3a59" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 432, | |
"non_padded": 8, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:security_studies|5": { | |
"hashes": { | |
"hash_examples": "514c14feaf000ad9", | |
"hash_full_prompts": "7d17a8e20c60d97c", | |
"hash_input_tokens": "895c74db00c6a743", | |
"hash_cont_tokens": "a529372b15216169" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 980, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:sociology|5": { | |
"hashes": { | |
"hash_examples": "f6c9bc9d18c80870", | |
"hash_full_prompts": "ecca93b95e23c984", | |
"hash_input_tokens": "b8e4605b6d0d2bd5", | |
"hash_cont_tokens": "3705b7f2b83cfcbb" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 804, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:us_foreign_policy|5": { | |
"hashes": { | |
"hash_examples": "ed7b78629db6678f", | |
"hash_full_prompts": "13d6d2f5aff5be79", | |
"hash_input_tokens": "8c0ddc00cbe258e4", | |
"hash_cont_tokens": "873c8e846c423dcd" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:virology|5": { | |
"hashes": { | |
"hash_examples": "bc52ffdc3f9b994a", | |
"hash_full_prompts": "53db62b8ca08c2e4", | |
"hash_input_tokens": "72c0da01d3949de0", | |
"hash_cont_tokens": "ceee975befbadd57" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 664, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|mmlu:world_religions|5": { | |
"hashes": { | |
"hash_examples": "ecdb4a4f94f62930", | |
"hash_full_prompts": "640f8a30cad5b30a", | |
"hash_input_tokens": "2ee21d840ebba8dc", | |
"hash_cont_tokens": "47af4cba3ad4d72b" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 684, | |
"non_padded": 0, | |
"effective_few_shots": 5.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "341a076d0beb7048", | |
"hash_full_prompts": "1c57e5e8895a9ccc", | |
"hash_input_tokens": "8ee3889bf1ffb521", | |
"hash_cont_tokens": "e993889140660bc5" | |
}, | |
"truncated": 0, | |
"non_truncated": 14042, | |
"padded": 55954, | |
"non_padded": 214, | |
"num_truncated_few_shots": 0 | |
} | |
} |