{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 24972.998985198,
"end_time": 40207.693627466,
"total_evaluation_time_secondes": "15234.694642267998",
"model_name": "deepseek-ai/deepseek-llm-67b-chat",
"model_sha": "79648bef7658bb824e4630740f6e1484c1b0620b",
"model_dtype": "torch.bfloat16",
"model_size": "125.78 GB",
"config": null
},
"results": {
"lighteval|mmlu:abstract_algebra|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836
},
"lighteval|mmlu:anatomy|5": {
"acc": 0.6814814814814815,
"acc_stderr": 0.04024778401977109
},
"lighteval|mmlu:astronomy|5": {
"acc": 0.8157894736842105,
"acc_stderr": 0.031546980450822305
},
"lighteval|mmlu:business_ethics|5": {
"acc": 0.78,
"acc_stderr": 0.041633319989322626
},
"lighteval|mmlu:clinical_knowledge|5": {
"acc": 0.7811320754716982,
"acc_stderr": 0.025447863825108625
},
"lighteval|mmlu:college_biology|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.031164899666948617
},
"lighteval|mmlu:college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605
},
"lighteval|mmlu:college_computer_science|5": {
"acc": 0.55,
"acc_stderr": 0.05
},
"lighteval|mmlu:college_mathematics|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589
},
"lighteval|mmlu:college_medicine|5": {
"acc": 0.6763005780346821,
"acc_stderr": 0.03567603799639171
},
"lighteval|mmlu:college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.048971049527263666
},
"lighteval|mmlu:computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909284
},
"lighteval|mmlu:conceptual_physics|5": {
"acc": 0.7021276595744681,
"acc_stderr": 0.02989614568209546
},
"lighteval|mmlu:econometrics|5": {
"acc": 0.5,
"acc_stderr": 0.047036043419179864
},
"lighteval|mmlu:electrical_engineering|5": {
"acc": 0.6689655172413793,
"acc_stderr": 0.03921545312467122
},
"lighteval|mmlu:elementary_mathematics|5": {
"acc": 0.5158730158730159,
"acc_stderr": 0.025738330639412152
},
"lighteval|mmlu:formal_logic|5": {
"acc": 0.5634920634920635,
"acc_stderr": 0.04435932892851466
},
"lighteval|mmlu:global_facts|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102
},
"lighteval|mmlu:high_school_biology|5": {
"acc": 0.8193548387096774,
"acc_stderr": 0.02188617856717254
},
"lighteval|mmlu:high_school_chemistry|5": {
"acc": 0.5320197044334976,
"acc_stderr": 0.035107665979592174
},
"lighteval|mmlu:high_school_computer_science|5": {
"acc": 0.74,
"acc_stderr": 0.044084400227680794
},
"lighteval|mmlu:high_school_european_history|5": {
"acc": 0.8424242424242424,
"acc_stderr": 0.028450388805284332
},
"lighteval|mmlu:high_school_geography|5": {
"acc": 0.8838383838383839,
"acc_stderr": 0.022828881775249377
},
"lighteval|mmlu:high_school_government_and_politics|5": {
"acc": 0.9430051813471503,
"acc_stderr": 0.016731085293607537
},
"lighteval|mmlu:high_school_macroeconomics|5": {
"acc": 0.717948717948718,
"acc_stderr": 0.02281581309889661
},
"lighteval|mmlu:high_school_mathematics|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.029045600290616258
},
"lighteval|mmlu:high_school_microeconomics|5": {
"acc": 0.8445378151260504,
"acc_stderr": 0.023536818625398897
},
"lighteval|mmlu:high_school_physics|5": {
"acc": 0.423841059602649,
"acc_stderr": 0.04034846678603397
},
"lighteval|mmlu:high_school_psychology|5": {
"acc": 0.8825688073394495,
"acc_stderr": 0.013802780227377348
},
"lighteval|mmlu:high_school_statistics|5": {
"acc": 0.6898148148148148,
"acc_stderr": 0.03154696285656629
},
"lighteval|mmlu:high_school_us_history|5": {
"acc": 0.9068627450980392,
"acc_stderr": 0.020397853969427
},
"lighteval|mmlu:high_school_world_history|5": {
"acc": 0.8945147679324894,
"acc_stderr": 0.019995560723758556
},
"lighteval|mmlu:human_aging|5": {
"acc": 0.7802690582959642,
"acc_stderr": 0.027790177064383595
},
"lighteval|mmlu:human_sexuality|5": {
"acc": 0.8396946564885496,
"acc_stderr": 0.03217829420744631
},
"lighteval|mmlu:international_law|5": {
"acc": 0.8264462809917356,
"acc_stderr": 0.03457272836917671
},
"lighteval|mmlu:jurisprudence|5": {
"acc": 0.8148148148148148,
"acc_stderr": 0.03755265865037183
},
"lighteval|mmlu:logical_fallacies|5": {
"acc": 0.8282208588957055,
"acc_stderr": 0.029634717272371037
},
"lighteval|mmlu:machine_learning|5": {
"acc": 0.5714285714285714,
"acc_stderr": 0.04697113923010213
},
"lighteval|mmlu:management|5": {
"acc": 0.912621359223301,
"acc_stderr": 0.027960689125970654
},
"lighteval|mmlu:marketing|5": {
"acc": 0.905982905982906,
"acc_stderr": 0.019119892798924978
},
"lighteval|mmlu:medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394
},
"lighteval|mmlu:miscellaneous|5": {
"acc": 0.9067688378033205,
"acc_stderr": 0.010397417087292856
},
"lighteval|mmlu:moral_disputes|5": {
"acc": 0.7803468208092486,
"acc_stderr": 0.02228963885261789
},
"lighteval|mmlu:moral_scenarios|5": {
"acc": 0.5720670391061452,
"acc_stderr": 0.016547887997416112
},
"lighteval|mmlu:nutrition|5": {
"acc": 0.7679738562091504,
"acc_stderr": 0.024170840879340863
},
"lighteval|mmlu:philosophy|5": {
"acc": 0.7909967845659164,
"acc_stderr": 0.023093140398374224
},
"lighteval|mmlu:prehistory|5": {
"acc": 0.845679012345679,
"acc_stderr": 0.02010083099985099
},
"lighteval|mmlu:professional_accounting|5": {
"acc": 0.5354609929078015,
"acc_stderr": 0.029752389657427054
},
"lighteval|mmlu:professional_law|5": {
"acc": 0.559973924380704,
"acc_stderr": 0.012678037478574513
},
"lighteval|mmlu:professional_medicine|5": {
"acc": 0.7463235294117647,
"acc_stderr": 0.026431329870789534
},
"lighteval|mmlu:professional_psychology|5": {
"acc": 0.7761437908496732,
"acc_stderr": 0.016863008585416613
},
"lighteval|mmlu:public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.04494290866252091
},
"lighteval|mmlu:security_studies|5": {
"acc": 0.7836734693877551,
"acc_stderr": 0.026358916334904028
},
"lighteval|mmlu:sociology|5": {
"acc": 0.8606965174129353,
"acc_stderr": 0.024484487162913973
},
"lighteval|mmlu:us_foreign_policy|5": {
"acc": 0.88,
"acc_stderr": 0.03265986323710906
},
"lighteval|mmlu:virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699121
},
"lighteval|mmlu:world_religions|5": {
"acc": 0.8421052631578947,
"acc_stderr": 0.027966785859160886
},
"lighteval|mmlu:_average|5": {
"acc": 0.7117508019628811,
"acc_stderr": 0.03160333696115359
}
},
"versions": {
"lighteval|mmlu:abstract_algebra|5": 0,
"lighteval|mmlu:anatomy|5": 0,
"lighteval|mmlu:astronomy|5": 0,
"lighteval|mmlu:business_ethics|5": 0,
"lighteval|mmlu:clinical_knowledge|5": 0,
"lighteval|mmlu:college_biology|5": 0,
"lighteval|mmlu:college_chemistry|5": 0,
"lighteval|mmlu:college_computer_science|5": 0,
"lighteval|mmlu:college_mathematics|5": 0,
"lighteval|mmlu:college_medicine|5": 0,
"lighteval|mmlu:college_physics|5": 0,
"lighteval|mmlu:computer_security|5": 0,
"lighteval|mmlu:conceptual_physics|5": 0,
"lighteval|mmlu:econometrics|5": 0,
"lighteval|mmlu:electrical_engineering|5": 0,
"lighteval|mmlu:elementary_mathematics|5": 0,
"lighteval|mmlu:formal_logic|5": 0,
"lighteval|mmlu:global_facts|5": 0,
"lighteval|mmlu:high_school_biology|5": 0,
"lighteval|mmlu:high_school_chemistry|5": 0,
"lighteval|mmlu:high_school_computer_science|5": 0,
"lighteval|mmlu:high_school_european_history|5": 0,
"lighteval|mmlu:high_school_geography|5": 0,
"lighteval|mmlu:high_school_government_and_politics|5": 0,
"lighteval|mmlu:high_school_macroeconomics|5": 0,
"lighteval|mmlu:high_school_mathematics|5": 0,
"lighteval|mmlu:high_school_microeconomics|5": 0,
"lighteval|mmlu:high_school_physics|5": 0,
"lighteval|mmlu:high_school_psychology|5": 0,
"lighteval|mmlu:high_school_statistics|5": 0,
"lighteval|mmlu:high_school_us_history|5": 0,
"lighteval|mmlu:high_school_world_history|5": 0,
"lighteval|mmlu:human_aging|5": 0,
"lighteval|mmlu:human_sexuality|5": 0,
"lighteval|mmlu:international_law|5": 0,
"lighteval|mmlu:jurisprudence|5": 0,
"lighteval|mmlu:logical_fallacies|5": 0,
"lighteval|mmlu:machine_learning|5": 0,
"lighteval|mmlu:management|5": 0,
"lighteval|mmlu:marketing|5": 0,
"lighteval|mmlu:medical_genetics|5": 0,
"lighteval|mmlu:miscellaneous|5": 0,
"lighteval|mmlu:moral_disputes|5": 0,
"lighteval|mmlu:moral_scenarios|5": 0,
"lighteval|mmlu:nutrition|5": 0,
"lighteval|mmlu:philosophy|5": 0,
"lighteval|mmlu:prehistory|5": 0,
"lighteval|mmlu:professional_accounting|5": 0,
"lighteval|mmlu:professional_law|5": 0,
"lighteval|mmlu:professional_medicine|5": 0,
"lighteval|mmlu:professional_psychology|5": 0,
"lighteval|mmlu:public_relations|5": 0,
"lighteval|mmlu:security_studies|5": 0,
"lighteval|mmlu:sociology|5": 0,
"lighteval|mmlu:us_foreign_policy|5": 0,
"lighteval|mmlu:virology|5": 0,
"lighteval|mmlu:world_religions|5": 0
},
"config_tasks": {
"lighteval|mmlu:abstract_algebra": {
"name": "mmlu:abstract_algebra",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "abstract_algebra",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:anatomy": {
"name": "mmlu:anatomy",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "anatomy",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 135,
"effective_num_docs": 135
},
"lighteval|mmlu:astronomy": {
"name": "mmlu:astronomy",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "astronomy",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 152,
"effective_num_docs": 152
},
"lighteval|mmlu:business_ethics": {
"name": "mmlu:business_ethics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "business_ethics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:clinical_knowledge": {
"name": "mmlu:clinical_knowledge",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 265,
"effective_num_docs": 265
},
"lighteval|mmlu:college_biology": {
"name": "mmlu:college_biology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_biology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 144,
"effective_num_docs": 144
},
"lighteval|mmlu:college_chemistry": {
"name": "mmlu:college_chemistry",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_chemistry",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:college_computer_science": {
"name": "mmlu:college_computer_science",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_computer_science",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:college_mathematics": {
"name": "mmlu:college_mathematics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_mathematics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:college_medicine": {
"name": "mmlu:college_medicine",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_medicine",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 173,
"effective_num_docs": 173
},
"lighteval|mmlu:college_physics": {
"name": "mmlu:college_physics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "college_physics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 102,
"effective_num_docs": 102
},
"lighteval|mmlu:computer_security": {
"name": "mmlu:computer_security",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "computer_security",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:conceptual_physics": {
"name": "mmlu:conceptual_physics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "conceptual_physics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 235,
"effective_num_docs": 235
},
"lighteval|mmlu:econometrics": {
"name": "mmlu:econometrics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "econometrics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 114,
"effective_num_docs": 114
},
"lighteval|mmlu:electrical_engineering": {
"name": "mmlu:electrical_engineering",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "electrical_engineering",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 145,
"effective_num_docs": 145
},
"lighteval|mmlu:elementary_mathematics": {
"name": "mmlu:elementary_mathematics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 378,
"effective_num_docs": 378
},
"lighteval|mmlu:formal_logic": {
"name": "mmlu:formal_logic",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "formal_logic",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 126,
"effective_num_docs": 126
},
"lighteval|mmlu:global_facts": {
"name": "mmlu:global_facts",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "global_facts",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:high_school_biology": {
"name": "mmlu:high_school_biology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_biology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 310,
"effective_num_docs": 310
},
"lighteval|mmlu:high_school_chemistry": {
"name": "mmlu:high_school_chemistry",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 203,
"effective_num_docs": 203
},
"lighteval|mmlu:high_school_computer_science": {
"name": "mmlu:high_school_computer_science",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:high_school_european_history": {
"name": "mmlu:high_school_european_history",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_european_history",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 165,
"effective_num_docs": 165
},
"lighteval|mmlu:high_school_geography": {
"name": "mmlu:high_school_geography",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_geography",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 198,
"effective_num_docs": 198
},
"lighteval|mmlu:high_school_government_and_politics": {
"name": "mmlu:high_school_government_and_politics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 193,
"effective_num_docs": 193
},
"lighteval|mmlu:high_school_macroeconomics": {
"name": "mmlu:high_school_macroeconomics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 390,
"effective_num_docs": 390
},
"lighteval|mmlu:high_school_mathematics": {
"name": "mmlu:high_school_mathematics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 270,
"effective_num_docs": 270
},
"lighteval|mmlu:high_school_microeconomics": {
"name": "mmlu:high_school_microeconomics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 238,
"effective_num_docs": 238
},
"lighteval|mmlu:high_school_physics": {
"name": "mmlu:high_school_physics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_physics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 151,
"effective_num_docs": 151
},
"lighteval|mmlu:high_school_psychology": {
"name": "mmlu:high_school_psychology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_psychology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 545,
"effective_num_docs": 545
},
"lighteval|mmlu:high_school_statistics": {
"name": "mmlu:high_school_statistics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_statistics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 216,
"effective_num_docs": 216
},
"lighteval|mmlu:high_school_us_history": {
"name": "mmlu:high_school_us_history",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_us_history",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 204,
"effective_num_docs": 204
},
"lighteval|mmlu:high_school_world_history": {
"name": "mmlu:high_school_world_history",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "high_school_world_history",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 237,
"effective_num_docs": 237
},
"lighteval|mmlu:human_aging": {
"name": "mmlu:human_aging",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_aging",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 223,
"effective_num_docs": 223
},
"lighteval|mmlu:human_sexuality": {
"name": "mmlu:human_sexuality",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "human_sexuality",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 131,
"effective_num_docs": 131
},
"lighteval|mmlu:international_law": {
"name": "mmlu:international_law",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "international_law",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 121,
"effective_num_docs": 121
},
"lighteval|mmlu:jurisprudence": {
"name": "mmlu:jurisprudence",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "jurisprudence",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 108,
"effective_num_docs": 108
},
"lighteval|mmlu:logical_fallacies": {
"name": "mmlu:logical_fallacies",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "logical_fallacies",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 163,
"effective_num_docs": 163
},
"lighteval|mmlu:machine_learning": {
"name": "mmlu:machine_learning",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "machine_learning",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 112,
"effective_num_docs": 112
},
"lighteval|mmlu:management": {
"name": "mmlu:management",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "management",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 103,
"effective_num_docs": 103
},
"lighteval|mmlu:marketing": {
"name": "mmlu:marketing",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "marketing",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 234,
"effective_num_docs": 234
},
"lighteval|mmlu:medical_genetics": {
"name": "mmlu:medical_genetics",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "medical_genetics",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:miscellaneous": {
"name": "mmlu:miscellaneous",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "miscellaneous",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 783,
"effective_num_docs": 783
},
"lighteval|mmlu:moral_disputes": {
"name": "mmlu:moral_disputes",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_disputes",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 346,
"effective_num_docs": 346
},
"lighteval|mmlu:moral_scenarios": {
"name": "mmlu:moral_scenarios",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "moral_scenarios",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 895,
"effective_num_docs": 895
},
"lighteval|mmlu:nutrition": {
"name": "mmlu:nutrition",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "nutrition",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 306,
"effective_num_docs": 306
},
"lighteval|mmlu:philosophy": {
"name": "mmlu:philosophy",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "philosophy",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 311,
"effective_num_docs": 311
},
"lighteval|mmlu:prehistory": {
"name": "mmlu:prehistory",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "prehistory",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 324,
"effective_num_docs": 324
},
"lighteval|mmlu:professional_accounting": {
"name": "mmlu:professional_accounting",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_accounting",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 282,
"effective_num_docs": 282
},
"lighteval|mmlu:professional_law": {
"name": "mmlu:professional_law",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_law",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 1534,
"effective_num_docs": 1534
},
"lighteval|mmlu:professional_medicine": {
"name": "mmlu:professional_medicine",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_medicine",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 272,
"effective_num_docs": 272
},
"lighteval|mmlu:professional_psychology": {
"name": "mmlu:professional_psychology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "professional_psychology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 612,
"effective_num_docs": 612
},
"lighteval|mmlu:public_relations": {
"name": "mmlu:public_relations",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "public_relations",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 110,
"effective_num_docs": 110
},
"lighteval|mmlu:security_studies": {
"name": "mmlu:security_studies",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "security_studies",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 245,
"effective_num_docs": 245
},
"lighteval|mmlu:sociology": {
"name": "mmlu:sociology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "sociology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 201,
"effective_num_docs": 201
},
"lighteval|mmlu:us_foreign_policy": {
"name": "mmlu:us_foreign_policy",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 100,
"effective_num_docs": 100
},
"lighteval|mmlu:virology": {
"name": "mmlu:virology",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "virology",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 166,
"effective_num_docs": 166
},
"lighteval|mmlu:world_religions": {
"name": "mmlu:world_religions",
"prompt_function": "mmlu_harness",
"hf_repo": "lighteval/mmlu",
"hf_subset": "world_religions",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"auxiliary_train",
"test",
"validation",
"dev"
],
"evaluation_splits": [
"test"
],
"few_shots_split": "dev",
"few_shots_select": "sequential",
"generation_size": 1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval",
"mmlu"
],
"original_num_docs": 171,
"effective_num_docs": 171
}
},
"summary_tasks": {
"lighteval|mmlu:abstract_algebra|5": {
"hashes": {
"hash_examples": "4c76229e00c9c0e9",
"hash_full_prompts": "774b83862a0f4a87",
"hash_input_tokens": "7f8f857461d1b02a",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:anatomy|5": {
"hashes": {
"hash_examples": "6a1f8104dccbd33b",
"hash_full_prompts": "93d3416fc0b6fa4d",
"hash_input_tokens": "6608d02344e3663d",
"hash_cont_tokens": "d05d09cd572a873c"
},
"truncated": 0,
"non_truncated": 135,
"padded": 540,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:astronomy|5": {
"hashes": {
"hash_examples": "1302effa3a76ce4c",
"hash_full_prompts": "b5290fb613b22e56",
"hash_input_tokens": "f19097128252fa6a",
"hash_cont_tokens": "4d6a28d8890ec144"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:business_ethics|5": {
"hashes": {
"hash_examples": "03cb8bce5336419a",
"hash_full_prompts": "94a1e9b92c2876bd",
"hash_input_tokens": "a0ec2ea728d51f71",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:clinical_knowledge|5": {
"hashes": {
"hash_examples": "ffbb9c7b2be257f9",
"hash_full_prompts": "c7d5ab4ed4656b35",
"hash_input_tokens": "c4785ecb747ca5f2",
"hash_cont_tokens": "3c7d8a8186c6590b"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1060,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_biology|5": {
"hashes": {
"hash_examples": "3ee77f176f38eb8e",
"hash_full_prompts": "6a07902ec7f9e62b",
"hash_input_tokens": "63b45528a00a4db7",
"hash_cont_tokens": "279773aa6ce61940"
},
"truncated": 0,
"non_truncated": 144,
"padded": 576,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_chemistry|5": {
"hashes": {
"hash_examples": "ce61a69c46d47aeb",
"hash_full_prompts": "856fcb3d0ff7cebb",
"hash_input_tokens": "8762397e3d50afc3",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_computer_science|5": {
"hashes": {
"hash_examples": "32805b52d7d5daab",
"hash_full_prompts": "b5da87a214cffb32",
"hash_input_tokens": "36d46b2ce52cb68c",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_mathematics|5": {
"hashes": {
"hash_examples": "55da1a0a0bd33722",
"hash_full_prompts": "b5c95b3698023a02",
"hash_input_tokens": "9101633209bc18dc",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_medicine|5": {
"hashes": {
"hash_examples": "c33e143163049176",
"hash_full_prompts": "3acf5469b7f29fcb",
"hash_input_tokens": "91985da79e038a2b",
"hash_cont_tokens": "cbef7f51c0e05888"
},
"truncated": 0,
"non_truncated": 173,
"padded": 692,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:college_physics|5": {
"hashes": {
"hash_examples": "ebdab1cdb7e555df",
"hash_full_prompts": "5c32536644631b28",
"hash_input_tokens": "06fdc345f6f21f9c",
"hash_cont_tokens": "6bd186b352215ef3"
},
"truncated": 0,
"non_truncated": 102,
"padded": 408,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:computer_security|5": {
"hashes": {
"hash_examples": "a24fd7d08a560921",
"hash_full_prompts": "e42fdaa85d5acd30",
"hash_input_tokens": "7f2a37846e8e4edf",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:conceptual_physics|5": {
"hashes": {
"hash_examples": "8300977a79386993",
"hash_full_prompts": "c6d43af5dd336d5c",
"hash_input_tokens": "a88b8b15afda96e5",
"hash_cont_tokens": "a34c616b69e32801"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:econometrics|5": {
"hashes": {
"hash_examples": "ddde36788a04a46f",
"hash_full_prompts": "31034501da0bc7f9",
"hash_input_tokens": "b752fa0cd22b27d2",
"hash_cont_tokens": "7351796ba24b666b"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:electrical_engineering|5": {
"hashes": {
"hash_examples": "acbc5def98c19b3f",
"hash_full_prompts": "476fad4f096cd68b",
"hash_input_tokens": "c0db0aeb672071c6",
"hash_cont_tokens": "b8fba554eb3ec8a8"
},
"truncated": 0,
"non_truncated": 145,
"padded": 580,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:elementary_mathematics|5": {
"hashes": {
"hash_examples": "146e61d07497a9bd",
"hash_full_prompts": "3824eba06dea055b",
"hash_input_tokens": "c6276361a789de01",
"hash_cont_tokens": "580743d9bba4b95f"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1512,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:formal_logic|5": {
"hashes": {
"hash_examples": "8635216e1909a03f",
"hash_full_prompts": "cd3c41e2909d7571",
"hash_input_tokens": "8f00b0cc9c84664b",
"hash_cont_tokens": "878d60194103c91e"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:global_facts|5": {
"hashes": {
"hash_examples": "30b315aa6353ee47",
"hash_full_prompts": "fcc00c7889a93c34",
"hash_input_tokens": "397edb2d9666a794",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_biology|5": {
"hashes": {
"hash_examples": "c9136373af2180de",
"hash_full_prompts": "a9c7b95be4df25a9",
"hash_input_tokens": "6c53ceaa135a34c7",
"hash_cont_tokens": "b0b8cdbb66ec59b7"
},
"truncated": 0,
"non_truncated": 310,
"padded": 1240,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_chemistry|5": {
"hashes": {
"hash_examples": "b0661bfa1add6404",
"hash_full_prompts": "c68562216951f8bb",
"hash_input_tokens": "66de539fe2d2c3c5",
"hash_cont_tokens": "2a19b9ffb2820943"
},
"truncated": 0,
"non_truncated": 203,
"padded": 812,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_computer_science|5": {
"hashes": {
"hash_examples": "80fc1d623a3d665f",
"hash_full_prompts": "30177b8fd27a2f4a",
"hash_input_tokens": "2aab95956ad2727f",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_european_history|5": {
"hashes": {
"hash_examples": "854da6e5af0fe1a1",
"hash_full_prompts": "17633e6137cb41ff",
"hash_input_tokens": "5b7b4833f3d4cbbb",
"hash_cont_tokens": "f21c8315afc26a95"
},
"truncated": 0,
"non_truncated": 165,
"padded": 656,
"non_padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_geography|5": {
"hashes": {
"hash_examples": "7dc963c7acd19ad8",
"hash_full_prompts": "e09912c03a159cd5",
"hash_input_tokens": "1b8f627c809dbe9a",
"hash_cont_tokens": "f82ab751ad945667"
},
"truncated": 0,
"non_truncated": 198,
"padded": 789,
"non_padded": 3,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_government_and_politics|5": {
"hashes": {
"hash_examples": "1f675dcdebc9758f",
"hash_full_prompts": "5c0e4af228af531c",
"hash_input_tokens": "9f3eee34625e5456",
"hash_cont_tokens": "0b0ce904859f0ec4"
},
"truncated": 0,
"non_truncated": 193,
"padded": 772,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_macroeconomics|5": {
"hashes": {
"hash_examples": "2fb32cf2d80f0b35",
"hash_full_prompts": "af9247e5b6fe98bb",
"hash_input_tokens": "1b2fe200ddbdd870",
"hash_cont_tokens": "7d40aacf6121b89f"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1544,
"non_padded": 16,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_mathematics|5": {
"hashes": {
"hash_examples": "fd6646fdb5d58a1f",
"hash_full_prompts": "7cb1a2fb40268262",
"hash_input_tokens": "d9d7dc1c0e6ceaa5",
"hash_cont_tokens": "ca9c605c4168ff50"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1058,
"non_padded": 22,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_microeconomics|5": {
"hashes": {
"hash_examples": "2118f21f71d87d84",
"hash_full_prompts": "fa37211a886f2ef4",
"hash_input_tokens": "a9b3710e3052f750",
"hash_cont_tokens": "70f33540f416b6ac"
},
"truncated": 0,
"non_truncated": 238,
"padded": 944,
"non_padded": 8,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_physics|5": {
"hashes": {
"hash_examples": "dc3ce06378548565",
"hash_full_prompts": "98ea9f98582a2b5d",
"hash_input_tokens": "7e14353bd002ec3f",
"hash_cont_tokens": "c579fb7928fc7347"
},
"truncated": 0,
"non_truncated": 151,
"padded": 600,
"non_padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_psychology|5": {
"hashes": {
"hash_examples": "c8d1d98a40e11f2f",
"hash_full_prompts": "f67cc3775a08925f",
"hash_input_tokens": "8680206d3865cfb1",
"hash_cont_tokens": "a23ab0a1475971ce"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2176,
"non_padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_statistics|5": {
"hashes": {
"hash_examples": "666c8759b98ee4ff",
"hash_full_prompts": "b0ee6c2c6929bec2",
"hash_input_tokens": "7ae7cf342994353f",
"hash_cont_tokens": "9ce5eedde4afe621"
},
"truncated": 0,
"non_truncated": 216,
"padded": 864,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_us_history|5": {
"hashes": {
"hash_examples": "95fef1c4b7d3f81e",
"hash_full_prompts": "1312aeb9ea05f2c7",
"hash_input_tokens": "e14c08939d9c9410",
"hash_cont_tokens": "4fdcb7c482bcec53"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:high_school_world_history|5": {
"hashes": {
"hash_examples": "7e5085b6184b0322",
"hash_full_prompts": "be2b8a9add49f17d",
"hash_input_tokens": "087ed77123b6ffd2",
"hash_cont_tokens": "d83ae9a96b0aed4d"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:human_aging|5": {
"hashes": {
"hash_examples": "c17333e7c7c10797",
"hash_full_prompts": "79eef4cda3705c09",
"hash_input_tokens": "a1dbdda3eb92666c",
"hash_cont_tokens": "359e961386472ea9"
},
"truncated": 0,
"non_truncated": 223,
"padded": 892,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:human_sexuality|5": {
"hashes": {
"hash_examples": "4edd1e9045df5e3d",
"hash_full_prompts": "981625b82b9a81fb",
"hash_input_tokens": "1b28a6f457841ed1",
"hash_cont_tokens": "13b88a3c56899de3"
},
"truncated": 0,
"non_truncated": 131,
"padded": 524,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:international_law|5": {
"hashes": {
"hash_examples": "db2fa00d771a062a",
"hash_full_prompts": "22956e858492f844",
"hash_input_tokens": "17dce58d20bcc641",
"hash_cont_tokens": "8bf1d19afb0a50e8"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:jurisprudence|5": {
"hashes": {
"hash_examples": "e956f86b124076fe",
"hash_full_prompts": "bd461536e495400a",
"hash_input_tokens": "435be9428e08d588",
"hash_cont_tokens": "8d9975285e0d88e8"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:logical_fallacies|5": {
"hashes": {
"hash_examples": "956e0e6365ab79f1",
"hash_full_prompts": "fedc6acbbbbc6cfe",
"hash_input_tokens": "1691c7b6cd154562",
"hash_cont_tokens": "d3cde9e2249ca662"
},
"truncated": 0,
"non_truncated": 163,
"padded": 616,
"non_padded": 36,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:machine_learning|5": {
"hashes": {
"hash_examples": "397997cc6f4d581e",
"hash_full_prompts": "d62d4107dcf4f9ce",
"hash_input_tokens": "076d12b029f02d30",
"hash_cont_tokens": "a6b058f5d92113d7"
},
"truncated": 0,
"non_truncated": 112,
"padded": 448,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:management|5": {
"hashes": {
"hash_examples": "2bcbe6f6ca63d740",
"hash_full_prompts": "1719983af1791060",
"hash_input_tokens": "1929721cc62604fb",
"hash_cont_tokens": "a6ec1a16c3f83d45"
},
"truncated": 0,
"non_truncated": 103,
"padded": 412,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:marketing|5": {
"hashes": {
"hash_examples": "8ddb20d964a1b065",
"hash_full_prompts": "0d7eda291da6fa36",
"hash_input_tokens": "a5f9035e6ecb3e86",
"hash_cont_tokens": "a6b4369bfe693179"
},
"truncated": 0,
"non_truncated": 234,
"padded": 896,
"non_padded": 40,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:medical_genetics|5": {
"hashes": {
"hash_examples": "182a71f4763d2cea",
"hash_full_prompts": "9eabf9eebfe80e45",
"hash_input_tokens": "5ffce59cba72a0b0",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:miscellaneous|5": {
"hashes": {
"hash_examples": "4c404fdbb4ca57fc",
"hash_full_prompts": "37049d695194a3d9",
"hash_input_tokens": "83042afa8c091279",
"hash_cont_tokens": "7dc21f4e1f667ce7"
},
"truncated": 0,
"non_truncated": 783,
"padded": 3128,
"non_padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:moral_disputes|5": {
"hashes": {
"hash_examples": "60cbd2baa3fea5c9",
"hash_full_prompts": "11dda175e9d34d98",
"hash_input_tokens": "6fc4daa54d19cd05",
"hash_cont_tokens": "fb15c2a2d27e53ab"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1384,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:moral_scenarios|5": {
"hashes": {
"hash_examples": "fd8b0431fbdd75ef",
"hash_full_prompts": "40dc6839b42f55a8",
"hash_input_tokens": "72150be20232cc25",
"hash_cont_tokens": "140dd53ea4a2ec5c"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3539,
"non_padded": 41,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:nutrition|5": {
"hashes": {
"hash_examples": "71e55e2b829b6528",
"hash_full_prompts": "9c6c9e2c49db9c15",
"hash_input_tokens": "e2751b59457acc80",
"hash_cont_tokens": "97c17d5c69629460"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1220,
"non_padded": 4,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:philosophy|5": {
"hashes": {
"hash_examples": "a6d489a8d208fa4b",
"hash_full_prompts": "4ff886e81d83df6e",
"hash_input_tokens": "d87321734c71430b",
"hash_cont_tokens": "7e293ea3651a41f0"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1244,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:prehistory|5": {
"hashes": {
"hash_examples": "6cc50f032a19acaa",
"hash_full_prompts": "3cb5c8f5bf7ca339",
"hash_input_tokens": "fed72da150e1f4f1",
"hash_cont_tokens": "2ad2293847938718"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1288,
"non_padded": 8,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:professional_accounting|5": {
"hashes": {
"hash_examples": "50f57ab32f5f6cea",
"hash_full_prompts": "75e5b48959c86f99",
"hash_input_tokens": "6a60ed02f0f13b2f",
"hash_cont_tokens": "b5c8d967dd2166a6"
},
"truncated": 0,
"non_truncated": 282,
"padded": 1116,
"non_padded": 12,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:professional_law|5": {
"hashes": {
"hash_examples": "a8fdc85c64f4b215",
"hash_full_prompts": "37ed26769d19497c",
"hash_input_tokens": "fcd9966b5a500d68",
"hash_cont_tokens": "60365138ed0a0c95"
},
"truncated": 0,
"non_truncated": 1534,
"padded": 6136,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:professional_medicine|5": {
"hashes": {
"hash_examples": "c373a28a3050a73a",
"hash_full_prompts": "f2532c95c343d2e6",
"hash_input_tokens": "c879f5701e5577ec",
"hash_cont_tokens": "a3cc386f7c6b7111"
},
"truncated": 0,
"non_truncated": 272,
"padded": 1088,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:professional_psychology|5": {
"hashes": {
"hash_examples": "bf5254fe818356af",
"hash_full_prompts": "0cd369119588d426",
"hash_input_tokens": "13944f5e078ce0e9",
"hash_cont_tokens": "9c87a3573a934a92"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2436,
"non_padded": 12,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:public_relations|5": {
"hashes": {
"hash_examples": "b66d52e28e7d14e0",
"hash_full_prompts": "7feeee7af5d2c907",
"hash_input_tokens": "99d37b59ac012f28",
"hash_cont_tokens": "682660bab76a30ac"
},
"truncated": 0,
"non_truncated": 110,
"padded": 432,
"non_padded": 8,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:security_studies|5": {
"hashes": {
"hash_examples": "514c14feaf000ad9",
"hash_full_prompts": "c3d1d877f4f4b78f",
"hash_input_tokens": "8dde0795687cda18",
"hash_cont_tokens": "bdbd97851979e380"
},
"truncated": 0,
"non_truncated": 245,
"padded": 980,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:sociology|5": {
"hashes": {
"hash_examples": "f6c9bc9d18c80870",
"hash_full_prompts": "23aca3ea2d5679b0",
"hash_input_tokens": "defbb6039b0d54f3",
"hash_cont_tokens": "0b1260d4ac757487"
},
"truncated": 0,
"non_truncated": 201,
"padded": 792,
"non_padded": 12,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:us_foreign_policy|5": {
"hashes": {
"hash_examples": "ed7b78629db6678f",
"hash_full_prompts": "afda14a61f8acea5",
"hash_input_tokens": "1922ae6afdfc79e5",
"hash_cont_tokens": "48fd32d5b6df1b99"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:virology|5": {
"hashes": {
"hash_examples": "bc52ffdc3f9b994a",
"hash_full_prompts": "9706b066772abe1c",
"hash_input_tokens": "37acad3f38763726",
"hash_cont_tokens": "e1cee0c4ae2bb23e"
},
"truncated": 0,
"non_truncated": 166,
"padded": 664,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
},
"lighteval|mmlu:world_religions|5": {
"hashes": {
"hash_examples": "ecdb4a4f94f62930",
"hash_full_prompts": "76ff0e05c4e43c19",
"hash_input_tokens": "78fecf98c2ff191c",
"hash_cont_tokens": "84c9d2a4b024cea5"
},
"truncated": 0,
"non_truncated": 171,
"padded": 684,
"non_padded": 0,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "341a076d0beb7048",
"hash_full_prompts": "fc530342424eba48",
"hash_input_tokens": "670aa49790b06170",
"hash_cont_tokens": "7f1d0cbb1f08e6bc"
},
"truncated": 0,
"non_truncated": 14042,
"padded": 55930,
"non_padded": 238,
"num_truncated_few_shots": 0
}
}