v2_results/Qwen/Qwen1.5-4B/results_2025-01-18T02-39-16.146945.json
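The JSON below is a lighteval results dump for Qwen/Qwen1.5-4B on the OALL benchmark suite (per-task acc_norm scores, task configs, and an aggregate under "results" -> "all"). A minimal sketch for fetching and inspecting the file with huggingface_hub follows; the repo id "OALL/v2_results" is an assumption inferred from the path above, not stated in the file itself.

# Minimal sketch: download this results file and print the aggregate score.
# Assumption: the hosting dataset repo is "OALL/v2_results" and the in-repo
# path matches the filename above.
import json

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="OALL/v2_results",  # assumed repo id
    filename="Qwen/Qwen1.5-4B/results_2025-01-18T02-39-16.146945.json",
    repo_type="dataset",
)

with open(path, encoding="utf-8") as f:
    results = json.load(f)

# Aggregate acc_norm across all tasks, stored under results["results"]["all"].
print(results["results"]["all"]["acc_norm"])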
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": 0,
"start_time": 770.876466535,
"end_time": 4123.191811979,
"total_evaluation_time_secondes": "3352.315345444",
"model_name": "Qwen/Qwen1.5-4B",
"model_sha": "a66363a0c24e2155c561e4b53c658b1d3965474e",
"model_dtype": "torch.bfloat16",
"model_size": "7.36 GB"
},
"results": {
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.22980251346499103,
"acc_norm_stderr": 0.017841897495538436
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.2417052826691381,
"acc_norm_stderr": 0.005829168580525378
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.25921787709497207,
"acc_norm_stderr": 0.014655780837497717
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.36666666666666664,
"acc_norm_stderr": 0.039478328284971595
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.2866666666666667,
"acc_norm_stderr": 0.037046034204230215
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.5659787367104441,
"acc_norm_stderr": 0.005543365174770748
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.4957464553794829,
"acc_norm_stderr": 0.006457968456328882
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.29127906976744183,
"acc_norm_stderr": 0.010958581212940627
},
"community|arabic_exams|0": {
"acc_norm": 0.24581005586592178,
"acc_norm_stderr": 0.018597638285962777
},
"community|arabic_mmlu:Accounting (University)|0": {
"acc_norm": 0.4189189189189189,
"acc_norm_stderr": 0.057746002446083286
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.018521756215423024
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"acc_norm": 0.2136986301369863,
"acc_norm_stderr": 0.02148546183747688
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.022421273612923707
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.09245003270420483
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.031074927625190884
},
"community|arabic_mmlu:Biology (High School)|0": {
"acc_norm": 0.32860184528034064,
"acc_norm_stderr": 0.012517677062185501
},
"community|arabic_mmlu:Civics (High School)|0": {
"acc_norm": 0.367816091954023,
"acc_norm_stderr": 0.05199814559011102
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"acc_norm": 0.2627118644067797,
"acc_norm_stderr": 0.028709427668446415
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"acc_norm": 0.36015325670498083,
"acc_norm_stderr": 0.02977110682354859
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.09470524295495535
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"acc_norm": 0.6,
"acc_norm_stderr": 0.035634832254989875
},
"community|arabic_mmlu:Computer Science (University)|0": {
"acc_norm": 0.5,
"acc_norm_stderr": 0.06299407883487121
},
"community|arabic_mmlu:Driving Test|0": {
"acc_norm": 0.4508670520231214,
"acc_norm_stderr": 0.014304422058661375
},
"community|arabic_mmlu:Economics (High School)|0": {
"acc_norm": 0.33611111111111114,
"acc_norm_stderr": 0.0249311269740684
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"acc_norm": 0.5862068965517241,
"acc_norm_stderr": 0.053108967729906795
},
"community|arabic_mmlu:Economics (University)|0": {
"acc_norm": 0.39416058394160586,
"acc_norm_stderr": 0.041903076014460645
},
"community|arabic_mmlu:General Knowledge|0": {
"acc_norm": 0.3136574074074074,
"acc_norm_stderr": 0.015794026115395626
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"acc_norm": 0.4476744186046512,
"acc_norm_stderr": 0.03802600168672209
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.0376360576248639
},
"community|arabic_mmlu:Geography (High School)|0": {
"acc_norm": 0.3371868978805395,
"acc_norm_stderr": 0.014680522384815589
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.02989616303312547
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.05884389414473132
},
"community|arabic_mmlu:History (High School)|0": {
"acc_norm": 0.33421052631578946,
"acc_norm_stderr": 0.017122123943809797
},
"community|arabic_mmlu:History (Middle School)|0": {
"acc_norm": 0.31527093596059114,
"acc_norm_stderr": 0.03269080871970186
},
"community|arabic_mmlu:History (Primary School)|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201943
},
"community|arabic_mmlu:Islamic Studies|0": {
"acc_norm": 0.27543035993740217,
"acc_norm_stderr": 0.017686244799648657
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"acc_norm": 0.4161676646706587,
"acc_norm_stderr": 0.02701196151197486
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"acc_norm": 0.3445378151260504,
"acc_norm_stderr": 0.030868682604121622
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"acc_norm": 0.43243243243243246,
"acc_norm_stderr": 0.015682043131127095
},
"community|arabic_mmlu:Law (Professional)|0": {
"acc_norm": 0.5222929936305732,
"acc_norm_stderr": 0.028233566139982937
},
"community|arabic_mmlu:Management (University)|0": {
"acc_norm": 0.64,
"acc_norm_stderr": 0.05579886659703323
},
"community|arabic_mmlu:Math (Primary School)|0": {
"acc_norm": 0.36185819070904646,
"acc_norm_stderr": 0.023790179661944643
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"acc_norm": 0.384297520661157,
"acc_norm_stderr": 0.03133363075160923
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.027037727051850487
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"acc_norm": 0.3076923076923077,
"acc_norm_stderr": 0.074871425137275
},
"community|arabic_mmlu:Physics (High School)|0": {
"acc_norm": 0.3215686274509804,
"acc_norm_stderr": 0.02930710626317035
},
"community|arabic_mmlu:Political Science (University)|0": {
"acc_norm": 0.4238095238095238,
"acc_norm_stderr": 0.03418182533795968
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"acc_norm": 0.3153526970954357,
"acc_norm_stderr": 0.029993427997512333
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"acc_norm": 0.4765957446808511,
"acc_norm_stderr": 0.01882380326514791
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"community|arabic_mmlu_ht:anatomy|0": {
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.03633384414073465
},
"community|arabic_mmlu_ht:astronomy|0": {
"acc_norm": 0.2236842105263158,
"acc_norm_stderr": 0.03391160934343602
},
"community|arabic_mmlu_ht:business_ethics|0": {
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"acc_norm": 0.2943396226415094,
"acc_norm_stderr": 0.02804918631569525
},
"community|arabic_mmlu_ht:college_biology|0": {
"acc_norm": 0.2847222222222222,
"acc_norm_stderr": 0.037738099906869334
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816506
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"acc_norm": 0.22,
"acc_norm_stderr": 0.041633319989322695
},
"community|arabic_mmlu_ht:college_medicine|0": {
"acc_norm": 0.3063583815028902,
"acc_norm_stderr": 0.03514942551267438
},
"community|arabic_mmlu_ht:college_physics|0": {
"acc_norm": 0.17647058823529413,
"acc_norm_stderr": 0.03793281185307808
},
"community|arabic_mmlu_ht:computer_security|0": {
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"acc_norm": 0.3617021276595745,
"acc_norm_stderr": 0.03141082197596239
},
"community|arabic_mmlu_ht:econometrics|0": {
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.0414243971948936
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"acc_norm": 0.33793103448275863,
"acc_norm_stderr": 0.0394170763206489
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"acc_norm": 0.29894179894179895,
"acc_norm_stderr": 0.023577604791655802
},
"community|arabic_mmlu_ht:formal_logic|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.042163702135578345
},
"community|arabic_mmlu_ht:global_facts|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"acc_norm": 0.2853396275898243,
"acc_norm_stderr": 0.007313991473833722
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"acc_norm": 0.2806274900398406,
"acc_norm_stderr": 0.007090865537133093
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"acc_norm": 0.26680569185475955,
"acc_norm_stderr": 0.004898934591679852
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"acc_norm": 0.26262626262626265,
"acc_norm_stderr": 0.03135305009533085
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.02951928261681725
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"acc_norm": 0.2877896921480457,
"acc_norm_stderr": 0.008421565356826416
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.026466117538959916
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.028657491285071987
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"acc_norm": 0.271523178807947,
"acc_norm_stderr": 0.03631329803969653
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"acc_norm": 0.24770642201834864,
"acc_norm_stderr": 0.01850814360254781
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"acc_norm": 0.2752835538752363,
"acc_norm_stderr": 0.006866772812513746
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"acc_norm": 0.28921568627450983,
"acc_norm_stderr": 0.031822318676475544
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"acc_norm": 0.28270042194092826,
"acc_norm_stderr": 0.029312814153955917
},
"community|arabic_mmlu_ht:human_aging|0": {
"acc_norm": 0.36771300448430494,
"acc_norm_stderr": 0.03236198350928275
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"acc_norm": 0.32061068702290074,
"acc_norm_stderr": 0.04093329229834278
},
"community|arabic_mmlu_ht:international_law|0": {
"acc_norm": 0.35537190082644626,
"acc_norm_stderr": 0.04369236326573981
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"acc_norm": 0.3425925925925926,
"acc_norm_stderr": 0.045879047413018084
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.03487825168497892
},
"community|arabic_mmlu_ht:machine_learning|0": {
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"community|arabic_mmlu_ht:management|0": {
"acc_norm": 0.32038834951456313,
"acc_norm_stderr": 0.0462028408228004
},
"community|arabic_mmlu_ht:marketing|0": {
"acc_norm": 0.4188034188034188,
"acc_norm_stderr": 0.03232128912157792
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"acc_norm": 0.2909090909090909,
"acc_norm_stderr": 0.009234463544994888
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"acc_norm": 0.33815028901734107,
"acc_norm_stderr": 0.02546977014940017
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"community|arabic_mmlu_ht:nutrition|0": {
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.02678745311190654
},
"community|arabic_mmlu_ht:philosophy|0": {
"acc_norm": 0.2508038585209003,
"acc_norm_stderr": 0.024619771956697158
},
"community|arabic_mmlu_ht:prehistory|0": {
"acc_norm": 0.2808641975308642,
"acc_norm_stderr": 0.025006469755799215
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"acc_norm": 0.27425786442179884,
"acc_norm_stderr": 0.006641069838725941
},
"community|arabic_mmlu_ht:professional_law|0": {
"acc_norm": 0.2661825466382872,
"acc_norm_stderr": 0.004945601886977108
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"acc_norm": 0.27550397067806964,
"acc_norm_stderr": 0.011045624785691722
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"acc_norm": 0.28889523265772193,
"acc_norm_stderr": 0.007659120668412166
},
"community|arabic_mmlu_ht:public_relations|0": {
"acc_norm": 0.35454545454545455,
"acc_norm_stderr": 0.04582004841505415
},
"community|arabic_mmlu_ht:security_studies|0": {
"acc_norm": 0.22857142857142856,
"acc_norm_stderr": 0.026882144922307744
},
"community|arabic_mmlu_ht:sociology|0": {
"acc_norm": 0.3283582089552239,
"acc_norm_stderr": 0.033206858897443244
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"acc_norm": 0.44,
"acc_norm_stderr": 0.0498887651569859
},
"community|arabic_mmlu_ht:virology|0": {
"acc_norm": 0.3614457831325301,
"acc_norm_stderr": 0.037400593820293204
},
"community|arabic_mmlu_ht:world_religions|0": {
"acc_norm": 0.36257309941520466,
"acc_norm_stderr": 0.036871306155620606
},
"community|madinah_qa:Arabic Language (General)|0": {
"acc_norm": 0.2957516339869281,
"acc_norm_stderr": 0.018463154132632813
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"acc_norm": 0.3013698630136986,
"acc_norm_stderr": 0.02405043171351818
},
"community|aratrust:Ethics|0": {
"acc_norm": 0.5,
"acc_norm_stderr": 0.06509445549041193
},
"community|aratrust:Illegal|0": {
"acc_norm": 0.5471698113207547,
"acc_norm_stderr": 0.06902828418342014
},
"community|aratrust:MentalHealth|0": {
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.05367388961676614
},
"community|aratrust:Offensive|0": {
"acc_norm": 0.8985507246376812,
"acc_norm_stderr": 0.036613506004249646
},
"community|aratrust:PhysicalHealth|0": {
"acc_norm": 0.5205479452054794,
"acc_norm_stderr": 0.058875785197386454
},
"community|aratrust:Privacy|0": {
"acc_norm": 0.7192982456140351,
"acc_norm_stderr": 0.06004585739704729
},
"community|aratrust:Trustfulness|0": {
"acc_norm": 0.4358974358974359,
"acc_norm_stderr": 0.05651006820711176
},
"community|aratrust:Unfairness|0": {
"acc_norm": 0.2727272727272727,
"acc_norm_stderr": 0.060606060606060615
},
"community|alghafa:_average|0": {
"acc_norm": 0.3618959187133115,
"acc_norm_stderr": 0.021765380660966546
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.3815012480095682,
"acc_norm_stderr": 0.035762353732326266
},
"community|arabic_mmlu_ht:_average|0": {
"acc_norm": 0.2977084158873426,
"acc_norm_stderr": 0.031059395737285504
},
"community|madinah_qa:_average|0": {
"acc_norm": 0.29856074850031333,
"acc_norm_stderr": 0.021256792923075495
},
"community|aratrust:_average|0": {
"acc_norm": 0.572300245214806,
"acc_norm_stderr": 0.05755598833780674
},
"all": {
"acc_norm": 0.34963954195697455,
"acc_norm_stderr": 0.03348997660770591,
"llm_as_judge": 0.47872744539411083,
"llm_as_judge_stderr": 0.00022080852881208962
},
"community|alrage_qa|0": {
"llm_as_judge": 0.47872744539411083,
"llm_as_judge_stderr": 0.00022080852881208962
}
},
"versions": {
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:Accounting (University)|0": 0,
"community|arabic_mmlu:Arabic Language (General)|0": 0,
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0,
"community|arabic_mmlu:Arabic Language (High School)|0": 0,
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0,
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0,
"community|arabic_mmlu:Biology (High School)|0": 0,
"community|arabic_mmlu:Civics (High School)|0": 0,
"community|arabic_mmlu:Civics (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (High School)|0": 0,
"community|arabic_mmlu:Computer Science (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (Primary School)|0": 0,
"community|arabic_mmlu:Computer Science (University)|0": 0,
"community|arabic_mmlu:Driving Test|0": 0,
"community|arabic_mmlu:Economics (High School)|0": 0,
"community|arabic_mmlu:Economics (Middle School)|0": 0,
"community|arabic_mmlu:Economics (University)|0": 0,
"community|arabic_mmlu:General Knowledge|0": 0,
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0,
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0,
"community|arabic_mmlu:Geography (High School)|0": 0,
"community|arabic_mmlu:Geography (Middle School)|0": 0,
"community|arabic_mmlu:Geography (Primary School)|0": 0,
"community|arabic_mmlu:History (High School)|0": 0,
"community|arabic_mmlu:History (Middle School)|0": 0,
"community|arabic_mmlu:History (Primary School)|0": 0,
"community|arabic_mmlu:Islamic Studies|0": 0,
"community|arabic_mmlu:Islamic Studies (High School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0,
"community|arabic_mmlu:Law (Professional)|0": 0,
"community|arabic_mmlu:Management (University)|0": 0,
"community|arabic_mmlu:Math (Primary School)|0": 0,
"community|arabic_mmlu:Natural Science (Middle School)|0": 0,
"community|arabic_mmlu:Natural Science (Primary School)|0": 0,
"community|arabic_mmlu:Philosophy (High School)|0": 0,
"community|arabic_mmlu:Physics (High School)|0": 0,
"community|arabic_mmlu:Political Science (University)|0": 0,
"community|arabic_mmlu:Social Science (Middle School)|0": 0,
"community|arabic_mmlu:Social Science (Primary School)|0": 0,
"community|arabic_mmlu_ht:abstract_algebra|0": 0,
"community|arabic_mmlu_ht:anatomy|0": 0,
"community|arabic_mmlu_ht:astronomy|0": 0,
"community|arabic_mmlu_ht:business_ethics|0": 0,
"community|arabic_mmlu_ht:clinical_knowledge|0": 0,
"community|arabic_mmlu_ht:college_biology|0": 0,
"community|arabic_mmlu_ht:college_chemistry|0": 0,
"community|arabic_mmlu_ht:college_computer_science|0": 0,
"community|arabic_mmlu_ht:college_mathematics|0": 0,
"community|arabic_mmlu_ht:college_medicine|0": 0,
"community|arabic_mmlu_ht:college_physics|0": 0,
"community|arabic_mmlu_ht:computer_security|0": 0,
"community|arabic_mmlu_ht:conceptual_physics|0": 0,
"community|arabic_mmlu_ht:econometrics|0": 0,
"community|arabic_mmlu_ht:electrical_engineering|0": 0,
"community|arabic_mmlu_ht:elementary_mathematics|0": 0,
"community|arabic_mmlu_ht:formal_logic|0": 0,
"community|arabic_mmlu_ht:global_facts|0": 0,
"community|arabic_mmlu_ht:high_school_biology|0": 0,
"community|arabic_mmlu_ht:high_school_chemistry|0": 0,
"community|arabic_mmlu_ht:high_school_computer_science|0": 0,
"community|arabic_mmlu_ht:high_school_european_history|0": 0,
"community|arabic_mmlu_ht:high_school_geography|0": 0,
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0,
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_mathematics|0": 0,
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_physics|0": 0,
"community|arabic_mmlu_ht:high_school_psychology|0": 0,
"community|arabic_mmlu_ht:high_school_statistics|0": 0,
"community|arabic_mmlu_ht:high_school_us_history|0": 0,
"community|arabic_mmlu_ht:high_school_world_history|0": 0,
"community|arabic_mmlu_ht:human_aging|0": 0,
"community|arabic_mmlu_ht:human_sexuality|0": 0,
"community|arabic_mmlu_ht:international_law|0": 0,
"community|arabic_mmlu_ht:jurisprudence|0": 0,
"community|arabic_mmlu_ht:logical_fallacies|0": 0,
"community|arabic_mmlu_ht:machine_learning|0": 0,
"community|arabic_mmlu_ht:management|0": 0,
"community|arabic_mmlu_ht:marketing|0": 0,
"community|arabic_mmlu_ht:medical_genetics|0": 0,
"community|arabic_mmlu_ht:miscellaneous|0": 0,
"community|arabic_mmlu_ht:moral_disputes|0": 0,
"community|arabic_mmlu_ht:moral_scenarios|0": 0,
"community|arabic_mmlu_ht:nutrition|0": 0,
"community|arabic_mmlu_ht:philosophy|0": 0,
"community|arabic_mmlu_ht:prehistory|0": 0,
"community|arabic_mmlu_ht:professional_accounting|0": 0,
"community|arabic_mmlu_ht:professional_law|0": 0,
"community|arabic_mmlu_ht:professional_medicine|0": 0,
"community|arabic_mmlu_ht:professional_psychology|0": 0,
"community|arabic_mmlu_ht:public_relations|0": 0,
"community|arabic_mmlu_ht:security_studies|0": 0,
"community|arabic_mmlu_ht:sociology|0": 0,
"community|arabic_mmlu_ht:us_foreign_policy|0": 0,
"community|arabic_mmlu_ht:virology|0": 0,
"community|arabic_mmlu_ht:world_religions|0": 0,
"community|aratrust:Ethics|0": 0,
"community|aratrust:Illegal|0": 0,
"community|aratrust:MentalHealth|0": 0,
"community|aratrust:Offensive|0": 0,
"community|aratrust:PhysicalHealth|0": 0,
"community|aratrust:Privacy|0": 0,
"community|aratrust:Trustfulness|0": 0,
"community|aratrust:Unfairness|0": 0,
"community|madinah_qa:Arabic Language (General)|0": 0,
"community|madinah_qa:Arabic Language (Grammar)|0": 0,
"community|alrage_qa|0": 0
},
"config_tasks": {
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams_pfn",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Accounting (University)": {
"name": "arabic_mmlu:Accounting (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Accounting (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 74,
"effective_num_docs": 74,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (General)": {
"name": "arabic_mmlu:Arabic Language (General)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)": {
"name": "arabic_mmlu:Arabic Language (Grammar)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (High School)": {
"name": "arabic_mmlu:Arabic Language (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)": {
"name": "arabic_mmlu:Arabic Language (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)": {
"name": "arabic_mmlu:Arabic Language (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 252,
"effective_num_docs": 252,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Biology (High School)": {
"name": "arabic_mmlu:Biology (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Biology (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1409,
"effective_num_docs": 1409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (High School)": {
"name": "arabic_mmlu:Civics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (Middle School)": {
"name": "arabic_mmlu:Civics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 236,
"effective_num_docs": 236,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (High School)": {
"name": "arabic_mmlu:Computer Science (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 261,
"effective_num_docs": 261,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Middle School)": {
"name": "arabic_mmlu:Computer Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Primary School)": {
"name": "arabic_mmlu:Computer Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 190,
"effective_num_docs": 190,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (University)": {
"name": "arabic_mmlu:Computer Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 64,
"effective_num_docs": 64,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Driving Test": {
"name": "arabic_mmlu:Driving Test",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Driving Test",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1211,
"effective_num_docs": 1211,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (High School)": {
"name": "arabic_mmlu:Economics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 360,
"effective_num_docs": 360,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (Middle School)": {
"name": "arabic_mmlu:Economics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (University)": {
"name": "arabic_mmlu:Economics (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 137,
"effective_num_docs": 137,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge": {
"name": "arabic_mmlu:General Knowledge",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 864,
"effective_num_docs": 864,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)": {
"name": "arabic_mmlu:General Knowledge (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 172,
"effective_num_docs": 172,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)": {
"name": "arabic_mmlu:General Knowledge (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 162,
"effective_num_docs": 162,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (High School)": {
"name": "arabic_mmlu:Geography (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1038,
"effective_num_docs": 1038,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Middle School)": {
"name": "arabic_mmlu:Geography (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Primary School)": {
"name": "arabic_mmlu:Geography (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (High School)": {
"name": "arabic_mmlu:History (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 760,
"effective_num_docs": 760,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Middle School)": {
"name": "arabic_mmlu:History (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Primary School)": {
"name": "arabic_mmlu:History (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies": {
"name": "arabic_mmlu:Islamic Studies",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 639,
"effective_num_docs": 639,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (High School)": {
"name": "arabic_mmlu:Islamic Studies (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 334,
"effective_num_docs": 334,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)": {
"name": "arabic_mmlu:Islamic Studies (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)": {
"name": "arabic_mmlu:Islamic Studies (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 999,
"effective_num_docs": 999,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Law (Professional)": {
"name": "arabic_mmlu:Law (Professional)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Law (Professional)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 314,
"effective_num_docs": 314,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Management (University)": {
"name": "arabic_mmlu:Management (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Management (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Math (Primary School)": {
"name": "arabic_mmlu:Math (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Math (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 409,
"effective_num_docs": 409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Middle School)": {
"name": "arabic_mmlu:Natural Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 242,
"effective_num_docs": 242,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Primary School)": {
"name": "arabic_mmlu:Natural Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 336,
"effective_num_docs": 336,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Philosophy (High School)": {
"name": "arabic_mmlu:Philosophy (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Philosophy (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 39,
"effective_num_docs": 39,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Physics (High School)": {
"name": "arabic_mmlu:Physics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Physics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Political Science (University)": {
"name": "arabic_mmlu:Political Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Political Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Middle School)": {
"name": "arabic_mmlu:Social Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 241,
"effective_num_docs": 241,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Primary School)": {
"name": "arabic_mmlu:Social Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 705,
"effective_num_docs": 705,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:abstract_algebra": {
"name": "arabic_mmlu_ht:abstract_algebra",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "abstract_algebra",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:anatomy": {
"name": "arabic_mmlu_ht:anatomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "anatomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:astronomy": {
"name": "arabic_mmlu_ht:astronomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "astronomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:business_ethics": {
"name": "arabic_mmlu_ht:business_ethics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "business_ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:clinical_knowledge": {
"name": "arabic_mmlu_ht:clinical_knowledge",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_biology": {
"name": "arabic_mmlu_ht:college_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_chemistry": {
"name": "arabic_mmlu_ht:college_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_computer_science": {
"name": "arabic_mmlu_ht:college_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_mathematics": {
"name": "arabic_mmlu_ht:college_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_medicine": {
"name": "arabic_mmlu_ht:college_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_physics": {
"name": "arabic_mmlu_ht:college_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:computer_security": {
"name": "arabic_mmlu_ht:computer_security",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "computer_security",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:conceptual_physics": {
"name": "arabic_mmlu_ht:conceptual_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "conceptual_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:econometrics": {
"name": "arabic_mmlu_ht:econometrics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "econometrics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:electrical_engineering": {
"name": "arabic_mmlu_ht:electrical_engineering",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "electrical_engineering",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:elementary_mathematics": {
"name": "arabic_mmlu_ht:elementary_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:formal_logic": {
"name": "arabic_mmlu_ht:formal_logic",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "formal_logic",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:global_facts": {
"name": "arabic_mmlu_ht:global_facts",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "global_facts",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_biology": {
"name": "arabic_mmlu_ht:high_school_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3813,
"effective_num_docs": 3813,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_chemistry": {
"name": "arabic_mmlu_ht:high_school_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4016,
"effective_num_docs": 4016,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_computer_science": {
"name": "arabic_mmlu_ht:high_school_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_european_history": {
"name": "arabic_mmlu_ht:high_school_european_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_european_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 8152,
"effective_num_docs": 8152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_geography": {
"name": "arabic_mmlu_ht:high_school_geography",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_geography",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics": {
"name": "arabic_mmlu_ht:high_school_government_and_politics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics": {
"name": "arabic_mmlu_ht:high_school_macroeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2891,
"effective_num_docs": 2891,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_mathematics": {
"name": "arabic_mmlu_ht:high_school_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics": {
"name": "arabic_mmlu_ht:high_school_microeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_physics": {
"name": "arabic_mmlu_ht:high_school_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_psychology": {
"name": "arabic_mmlu_ht:high_school_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_statistics": {
"name": "arabic_mmlu_ht:high_school_statistics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_statistics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4232,
"effective_num_docs": 4232,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_us_history": {
"name": "arabic_mmlu_ht:high_school_us_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_us_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_world_history": {
"name": "arabic_mmlu_ht:high_school_world_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_world_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_aging": {
"name": "arabic_mmlu_ht:human_aging",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_aging",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_sexuality": {
"name": "arabic_mmlu_ht:human_sexuality",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_sexuality",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:international_law": {
"name": "arabic_mmlu_ht:international_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "international_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:jurisprudence": {
"name": "arabic_mmlu_ht:jurisprudence",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "jurisprudence",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:logical_fallacies": {
"name": "arabic_mmlu_ht:logical_fallacies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "logical_fallacies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:machine_learning": {
"name": "arabic_mmlu_ht:machine_learning",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "machine_learning",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:management": {
"name": "arabic_mmlu_ht:management",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "management",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:marketing": {
"name": "arabic_mmlu_ht:marketing",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "marketing",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:medical_genetics": {
"name": "arabic_mmlu_ht:medical_genetics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "medical_genetics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:miscellaneous": {
"name": "arabic_mmlu_ht:miscellaneous",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "miscellaneous",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2420,
"effective_num_docs": 2420,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_disputes": {
"name": "arabic_mmlu_ht:moral_disputes",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_disputes",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_scenarios": {
"name": "arabic_mmlu_ht:moral_scenarios",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_scenarios",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:nutrition": {
"name": "arabic_mmlu_ht:nutrition",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "nutrition",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:philosophy": {
"name": "arabic_mmlu_ht:philosophy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "philosophy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:prehistory": {
"name": "arabic_mmlu_ht:prehistory",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "prehistory",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_accounting": {
"name": "arabic_mmlu_ht:professional_accounting",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_accounting",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4514,
"effective_num_docs": 4514,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_law": {
"name": "arabic_mmlu_ht:professional_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7987,
"effective_num_docs": 7987,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_medicine": {
"name": "arabic_mmlu_ht:professional_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1637,
"effective_num_docs": 1637,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_psychology": {
"name": "arabic_mmlu_ht:professional_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3503,
"effective_num_docs": 3503,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:public_relations": {
"name": "arabic_mmlu_ht:public_relations",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "public_relations",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:security_studies": {
"name": "arabic_mmlu_ht:security_studies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "security_studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:sociology": {
"name": "arabic_mmlu_ht:sociology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "sociology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:us_foreign_policy": {
"name": "arabic_mmlu_ht:us_foreign_policy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:virology": {
"name": "arabic_mmlu_ht:virology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "virology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:world_religions": {
"name": "arabic_mmlu_ht:world_religions",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "world_religions",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Ethics": {
"name": "aratrust:Ethics",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 60,
"effective_num_docs": 60,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Illegal": {
"name": "aratrust:Illegal",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Illegal",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 53,
"effective_num_docs": 53,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:MentalHealth": {
"name": "aratrust:MentalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "MentalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 76,
"effective_num_docs": 76,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Offensive": {
"name": "aratrust:Offensive",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Offensive",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 69,
"effective_num_docs": 69,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:PhysicalHealth": {
"name": "aratrust:PhysicalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "PhysicalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 73,
"effective_num_docs": 73,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Privacy": {
"name": "aratrust:Privacy",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Privacy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Trustfulness": {
"name": "aratrust:Trustfulness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Trustfulness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 78,
"effective_num_docs": 78,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Unfairness": {
"name": "aratrust:Unfairness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Unfairness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 55,
"effective_num_docs": 55,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (General)": {
"name": "madinah_qa:Arabic Language (General)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (Grammar)": {
"name": "madinah_qa:Arabic Language (Grammar)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alrage_qa": {
"name": "alrage_qa",
"prompt_function": "qa_prompt_arabic",
"hf_repo": "OALL/ALRAGE",
"hf_subset": null,
"metric": [
{
"metric_name": "llm_as_judge",
"higher_is_better": true,
"category": "7",
"use_case": "10",
"sample_level_fn": "_sample_level_fn",
"corpus_level_fn": "aggregate_scores"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 200,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2106,
"effective_num_docs": 2106,
"must_remove_duplicate_docs": false,
"version": 0
}
},
"summary_tasks": {
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "34da269ef01f8f35",
"hash_input_tokens": "6923d5bb0fe08d58",
"hash_cont_tokens": "d6519f4be64b0b3f"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "61fad94670ba0541",
"hash_input_tokens": "b01988bda81b3bac",
"hash_cont_tokens": "fe549766a0ce738a"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "b6672c11a56c4763",
"hash_input_tokens": "c893d3f9c4b8ef99",
"hash_cont_tokens": "0349d70fe949b783"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "f671003d511ab7cc",
"hash_input_tokens": "b064fd88389c83f9",
"hash_cont_tokens": "65b9e8001c0d9d5e"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "04c86efa47d50e26",
"hash_input_tokens": "ba18044e4376ab4f",
"hash_cont_tokens": "707d382e7333be99"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "3d3086e874381a32",
"hash_input_tokens": "8da72eadd510888d",
"hash_cont_tokens": "b2739963cb832e04"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "a485a541cb0cbd4f",
"hash_input_tokens": "a31fa255fe1b99f8",
"hash_cont_tokens": "b5f274f703abc5b7"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "11bb76ccb936df22",
"hash_input_tokens": "a7997ef542efd420",
"hash_cont_tokens": "87c18aae5cfd812b"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17921,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "0f6e11a7f1a6334d",
"hash_input_tokens": "073d4fea5ff1a3ce",
"hash_cont_tokens": "8e03dcc04ffbd0bd"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5096,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "913ee0eebf742d26",
"hash_input_tokens": "252136dfbde42904",
"hash_cont_tokens": "a4ef65bc7bab8dcf"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2096,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Accounting (University)|0": {
"hashes": {
"hash_examples": "30e09697562ff9e7",
"hash_full_prompts": "014f8382a5a07c61",
"hash_input_tokens": "fd0d54e7088d35da",
"hash_cont_tokens": "587bf4caea1658f4"
},
"truncated": 0,
"non_truncated": 74,
"padded": 256,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "a56fdc3a4fd4a26b",
"hash_input_tokens": "96cbacb17237e503",
"hash_cont_tokens": "95a234c727b7b43c"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2400,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "a645c5a3387f2ec9",
"hash_input_tokens": "d5a70ed114eafdbd",
"hash_cont_tokens": "98f874e7446c544b"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1545,
"non_padded": 43,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"hashes": {
"hash_examples": "a9c2cd9a9929292a",
"hash_full_prompts": "556559cb13dae610",
"hash_input_tokens": "56a4b65fc520ae1e",
"hash_cont_tokens": "3d676d0d2f081e05"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1505,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"hashes": {
"hash_examples": "2f8a77bbbd0e21ff",
"hash_full_prompts": "f986f15dbc7e9100",
"hash_input_tokens": "e5baa9ef2fe6629c",
"hash_cont_tokens": "322ea7667dfc2c2d"
},
"truncated": 0,
"non_truncated": 27,
"padded": 105,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"hashes": {
"hash_examples": "5eed3da47822539b",
"hash_full_prompts": "a531f0be2d434100",
"hash_input_tokens": "72d4b5a96c18d83b",
"hash_cont_tokens": "f3c78f80ddea1519"
},
"truncated": 0,
"non_truncated": 252,
"padded": 918,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Biology (High School)|0": {
"hashes": {
"hash_examples": "91ae6d22a0f0213d",
"hash_full_prompts": "8f7200111c3bda2f",
"hash_input_tokens": "f17ea151f736a9ab",
"hash_cont_tokens": "aaa20fdc3c06d2c3"
},
"truncated": 0,
"non_truncated": 1409,
"padded": 4968,
"non_padded": 88,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (High School)|0": {
"hashes": {
"hash_examples": "f27bf8791bea2bb9",
"hash_full_prompts": "b4d0b0deb74ca875",
"hash_input_tokens": "8608580ebc7443af",
"hash_cont_tokens": "e02c7ebfec7f8df8"
},
"truncated": 0,
"non_truncated": 87,
"padded": 312,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"hashes": {
"hash_examples": "74f5bb0098c8916f",
"hash_full_prompts": "0f5c26bab97f062e",
"hash_input_tokens": "323143117e557144",
"hash_cont_tokens": "1ffdd9a463183bfa"
},
"truncated": 0,
"non_truncated": 236,
"padded": 940,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"hashes": {
"hash_examples": "a4278d7b525d46fe",
"hash_full_prompts": "6d031feae91089b4",
"hash_input_tokens": "744c0cbb36309d58",
"hash_cont_tokens": "821feca3d9004c98"
},
"truncated": 0,
"non_truncated": 261,
"padded": 994,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"hashes": {
"hash_examples": "0cb6c07e4b80dfd4",
"hash_full_prompts": "833911f410f06e26",
"hash_input_tokens": "9b724f122a74dca9",
"hash_cont_tokens": "8b4f299b6f012a83"
},
"truncated": 0,
"non_truncated": 27,
"padded": 100,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"hashes": {
"hash_examples": "d96fc1bc32473533",
"hash_full_prompts": "f7dfadae4451a309",
"hash_input_tokens": "1dcb7b47c7198941",
"hash_cont_tokens": "1bc67f97b48b9ece"
},
"truncated": 0,
"non_truncated": 190,
"padded": 476,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (University)|0": {
"hashes": {
"hash_examples": "8835587e436cbaff",
"hash_full_prompts": "3bd4a2b00782ff99",
"hash_input_tokens": "09e8f06b285bced1",
"hash_cont_tokens": "e9d871459bc85f62"
},
"truncated": 0,
"non_truncated": 64,
"padded": 247,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Driving Test|0": {
"hashes": {
"hash_examples": "7a4c38a2c451d075",
"hash_full_prompts": "432155ed8cf8b2e3",
"hash_input_tokens": "159f8d1290fee377",
"hash_cont_tokens": "cd411982b0f12d43"
},
"truncated": 0,
"non_truncated": 1211,
"padded": 3606,
"non_padded": 79,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (High School)|0": {
"hashes": {
"hash_examples": "c04c252836601279",
"hash_full_prompts": "60152ffac1e648c7",
"hash_input_tokens": "292945a13d8865c7",
"hash_cont_tokens": "4bda66df90f2d4d8"
},
"truncated": 0,
"non_truncated": 360,
"padded": 1374,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"hashes": {
"hash_examples": "18fba1579406b3cc",
"hash_full_prompts": "fe97d6515b8ecbc2",
"hash_input_tokens": "75a611b2f1a6c070",
"hash_cont_tokens": "3ea283b0f50a72f5"
},
"truncated": 0,
"non_truncated": 87,
"padded": 344,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (University)|0": {
"hashes": {
"hash_examples": "7c9e86fba8151562",
"hash_full_prompts": "12c31588a7785336",
"hash_input_tokens": "ca69bf384bfd42ec",
"hash_cont_tokens": "91cdb256248a5bdf"
},
"truncated": 0,
"non_truncated": 137,
"padded": 532,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge|0": {
"hashes": {
"hash_examples": "acfbe4e1f0314b85",
"hash_full_prompts": "d777e26be5ef6429",
"hash_input_tokens": "5ad6e00acd92f2f3",
"hash_cont_tokens": "76d704fbedbe5ab8"
},
"truncated": 0,
"non_truncated": 864,
"padded": 3169,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"hashes": {
"hash_examples": "03cd0ecf10224316",
"hash_full_prompts": "91f3146f8965b457",
"hash_input_tokens": "9b8cd4b7ff839d8d",
"hash_cont_tokens": "aff2aed9268be2e2"
},
"truncated": 0,
"non_truncated": 172,
"padded": 607,
"non_padded": 21,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"hashes": {
"hash_examples": "c3ee30196e05e122",
"hash_full_prompts": "8712e31ee1abdc1f",
"hash_input_tokens": "61d2328b056d80af",
"hash_cont_tokens": "6c8978669cdc11fb"
},
"truncated": 0,
"non_truncated": 162,
"padded": 629,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (High School)|0": {
"hashes": {
"hash_examples": "e2e329d2bdd9fb7b",
"hash_full_prompts": "4172b74f530012d1",
"hash_input_tokens": "74dea1c841be128f",
"hash_cont_tokens": "37e2e9c548d8c904"
},
"truncated": 0,
"non_truncated": 1038,
"padded": 4052,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"hashes": {
"hash_examples": "420b161444291989",
"hash_full_prompts": "e8a15bb69931c97f",
"hash_input_tokens": "53f0d9ce48fbfb4a",
"hash_cont_tokens": "5e24bb4c8be23901"
},
"truncated": 0,
"non_truncated": 272,
"padded": 966,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"hashes": {
"hash_examples": "5bc5ca48a4210899",
"hash_full_prompts": "515820199d698444",
"hash_input_tokens": "dd0eccd5d277d6ca",
"hash_cont_tokens": "b9f45957a97d1ecf"
},
"truncated": 0,
"non_truncated": 57,
"padded": 216,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (High School)|0": {
"hashes": {
"hash_examples": "c7cc37f29311bea1",
"hash_full_prompts": "8265a8ac74249261",
"hash_input_tokens": "a63a60de6993d266",
"hash_cont_tokens": "b16e65544485acae"
},
"truncated": 0,
"non_truncated": 760,
"padded": 2886,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Middle School)|0": {
"hashes": {
"hash_examples": "5b9f1973337153a2",
"hash_full_prompts": "807d9b29253a2d4b",
"hash_input_tokens": "e8ac2a1d4966717b",
"hash_cont_tokens": "e3b355a58a286ee4"
},
"truncated": 0,
"non_truncated": 203,
"padded": 734,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Primary School)|0": {
"hashes": {
"hash_examples": "af2469847007c1fe",
"hash_full_prompts": "b3af283f338ba0e1",
"hash_input_tokens": "3eeeed4796250963",
"hash_cont_tokens": "a52a22630c3cb3f7"
},
"truncated": 0,
"non_truncated": 102,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies|0": {
"hashes": {
"hash_examples": "c8da9b2f16a5ea0f",
"hash_full_prompts": "2661393fe36f1ba5",
"hash_input_tokens": "77e9ec7b54a0be43",
"hash_cont_tokens": "1866597a67ff4424"
},
"truncated": 0,
"non_truncated": 639,
"padded": 2493,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"hashes": {
"hash_examples": "efb11bc8ef398117",
"hash_full_prompts": "b0bee4a17b50c2f3",
"hash_input_tokens": "5e781abbbd5f3722",
"hash_cont_tokens": "6b678abb2fd451bd"
},
"truncated": 0,
"non_truncated": 334,
"padded": 1281,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"hashes": {
"hash_examples": "9e33ab030eebdb99",
"hash_full_prompts": "f425f870e9f35d88",
"hash_input_tokens": "3202b8f5803bdaec",
"hash_cont_tokens": "e0c922e595ad51cd"
},
"truncated": 0,
"non_truncated": 238,
"padded": 867,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"hashes": {
"hash_examples": "4167565d878b20eb",
"hash_full_prompts": "78a6a816c859d681",
"hash_input_tokens": "c0805d905c37c643",
"hash_cont_tokens": "97e2182a63c5686c"
},
"truncated": 0,
"non_truncated": 999,
"padded": 2969,
"non_padded": 55,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Law (Professional)|0": {
"hashes": {
"hash_examples": "e77f52c8fe4352b3",
"hash_full_prompts": "f2150d3a3d7fef4e",
"hash_input_tokens": "fbba3a0468816760",
"hash_cont_tokens": "324cc46c561b417c"
},
"truncated": 0,
"non_truncated": 314,
"padded": 1223,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Management (University)|0": {
"hashes": {
"hash_examples": "09682649b04b7327",
"hash_full_prompts": "846428f541275e8a",
"hash_input_tokens": "cdcff5b9faff9042",
"hash_cont_tokens": "1e98e1e2cd19a5e3"
},
"truncated": 0,
"non_truncated": 75,
"padded": 200,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Math (Primary School)|0": {
"hashes": {
"hash_examples": "edb027bfae7e76f1",
"hash_full_prompts": "66c9ce547e0542b4",
"hash_input_tokens": "2d1ef9e9ea905c17",
"hash_cont_tokens": "632401a080490684"
},
"truncated": 0,
"non_truncated": 409,
"padded": 1290,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"hashes": {
"hash_examples": "96e72c9094c2364c",
"hash_full_prompts": "ebfcce496ef1dae9",
"hash_input_tokens": "1ab5edc99e1e4017",
"hash_cont_tokens": "17e42af5dbb9eee1"
},
"truncated": 0,
"non_truncated": 242,
"padded": 924,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"hashes": {
"hash_examples": "69e35bad3dec5a4d",
"hash_full_prompts": "347440e2faadc755",
"hash_input_tokens": "9cc83b51545fb956",
"hash_cont_tokens": "a7423721c9837336"
},
"truncated": 0,
"non_truncated": 336,
"padded": 1206,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"hashes": {
"hash_examples": "dc6ebd484a02fca5",
"hash_full_prompts": "36346c36d96b7742",
"hash_input_tokens": "b10f293b6c3f00b5",
"hash_cont_tokens": "69b31fc6977897bf"
},
"truncated": 0,
"non_truncated": 39,
"padded": 156,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Physics (High School)|0": {
"hashes": {
"hash_examples": "58a1722472c9e644",
"hash_full_prompts": "aa53ca80de338b24",
"hash_input_tokens": "014e20b796435f24",
"hash_cont_tokens": "f9506aa86f66954d"
},
"truncated": 0,
"non_truncated": 255,
"padded": 996,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Political Science (University)|0": {
"hashes": {
"hash_examples": "07a4ed6aabbdfd1e",
"hash_full_prompts": "fbd170954ef7b83d",
"hash_input_tokens": "be0ec1f4020682bf",
"hash_cont_tokens": "4799b66f49438465"
},
"truncated": 0,
"non_truncated": 210,
"padded": 688,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"hashes": {
"hash_examples": "8ca955902f304664",
"hash_full_prompts": "0dc9370d58faf348",
"hash_input_tokens": "8a67fb1df3af3a28",
"hash_cont_tokens": "4602cb88db99312d"
},
"truncated": 0,
"non_truncated": 241,
"padded": 919,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"hashes": {
"hash_examples": "934025ab3738123c",
"hash_full_prompts": "c5d972f5b1007ee1",
"hash_input_tokens": "071f3c2d3a329c65",
"hash_cont_tokens": "19e973e9f05c9c82"
},
"truncated": 0,
"non_truncated": 705,
"padded": 2004,
"non_padded": 39,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"hashes": {
"hash_examples": "0b557911f2f6d919",
"hash_full_prompts": "ab1666c18f658f17",
"hash_input_tokens": "5079eae0132f9572",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:anatomy|0": {
"hashes": {
"hash_examples": "a552d8a0ef294061",
"hash_full_prompts": "a1b8457af2bd2730",
"hash_input_tokens": "c32c8d70e09bc26c",
"hash_cont_tokens": "96c000fa61c3bd55"
},
"truncated": 0,
"non_truncated": 135,
"padded": 532,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:astronomy|0": {
"hashes": {
"hash_examples": "c4a372d0af7da098",
"hash_full_prompts": "11384879a4089109",
"hash_input_tokens": "fcbbfb05e4b93f9e",
"hash_cont_tokens": "b13cc32205751d90"
},
"truncated": 0,
"non_truncated": 152,
"padded": 604,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:business_ethics|0": {
"hashes": {
"hash_examples": "9f71d816abf8af7a",
"hash_full_prompts": "ddf9e4b521d07cbb",
"hash_input_tokens": "cf8d21a7f4ff0969",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"hashes": {
"hash_examples": "38303cd765589ef3",
"hash_full_prompts": "c939850c18c1f485",
"hash_input_tokens": "9045f4eaad5458f9",
"hash_cont_tokens": "c771582839d4f30c"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1028,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_biology|0": {
"hashes": {
"hash_examples": "dbd9b5d318e60b04",
"hash_full_prompts": "ac205b3759b3a9a4",
"hash_input_tokens": "a1311e4adca9b882",
"hash_cont_tokens": "ec774ac0d0ad658b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 572,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"hashes": {
"hash_examples": "6f88491d03db8a4c",
"hash_full_prompts": "159a67ca2e736fab",
"hash_input_tokens": "f85290c69697e465",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"hashes": {
"hash_examples": "ebfdee5ef2ed5e17",
"hash_full_prompts": "d0dc6217b8cfb3bd",
"hash_input_tokens": "ef316d9cb195026e",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"hashes": {
"hash_examples": "e3f22cd7712aae2f",
"hash_full_prompts": "3bc7e5011e0aa6af",
"hash_input_tokens": "4bbae0d48aa22f2a",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_medicine|0": {
"hashes": {
"hash_examples": "51a5501373afb5a7",
"hash_full_prompts": "0311a774622dfa8f",
"hash_input_tokens": "e161e1c9df0da661",
"hash_cont_tokens": "1823a754e6394181"
},
"truncated": 0,
"non_truncated": 173,
"padded": 680,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_physics|0": {
"hashes": {
"hash_examples": "2d3e015989b108db",
"hash_full_prompts": "877dc1bc05688081",
"hash_input_tokens": "f7166f28b9296efa",
"hash_cont_tokens": "ee5dc873d27b9e10"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:computer_security|0": {
"hashes": {
"hash_examples": "f8810eddc38dfee4",
"hash_full_prompts": "1e3a2c34fa2e6065",
"hash_input_tokens": "fcf4df2c7578071d",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"hashes": {
"hash_examples": "211e32cc43c6b1dc",
"hash_full_prompts": "dd90bd4c0e043021",
"hash_input_tokens": "7209354300868dcf",
"hash_cont_tokens": "b7b580bbcf7e0afa"
},
"truncated": 0,
"non_truncated": 235,
"padded": 896,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:econometrics|0": {
"hashes": {
"hash_examples": "810023786b2484d2",
"hash_full_prompts": "49dc7af5e2fff251",
"hash_input_tokens": "c1cbfe2f6ee82b62",
"hash_cont_tokens": "d44932b2a931e093"
},
"truncated": 0,
"non_truncated": 114,
"padded": 452,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"hashes": {
"hash_examples": "a222760c93eaa1ee",
"hash_full_prompts": "9be471685d280ff2",
"hash_input_tokens": "e54d895cc5de8a32",
"hash_cont_tokens": "159f4cb1232d2a3c"
},
"truncated": 0,
"non_truncated": 145,
"padded": 544,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"hashes": {
"hash_examples": "4c069aeee64dc227",
"hash_full_prompts": "d0598901df4b6269",
"hash_input_tokens": "5bac0a913f1b3894",
"hash_cont_tokens": "2bf44b70baf49dfa"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1500,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:formal_logic|0": {
"hashes": {
"hash_examples": "3cb0ccbf8e8a77ae",
"hash_full_prompts": "9e96fb012b599a96",
"hash_input_tokens": "592697e629ae9267",
"hash_cont_tokens": "8765c45f4711ebb8"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:global_facts|0": {
"hashes": {
"hash_examples": "c1d039e64ea321b9",
"hash_full_prompts": "e4544b657c5cfaea",
"hash_input_tokens": "7c1b792505c2903b",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 388,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"hashes": {
"hash_examples": "ddcb8237bb4ba08a",
"hash_full_prompts": "bd321c6f2d140a86",
"hash_input_tokens": "08cd6adecbfb143b",
"hash_cont_tokens": "49908817551a4513"
},
"truncated": 0,
"non_truncated": 3813,
"padded": 15100,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"hashes": {
"hash_examples": "07061b55c5c436d9",
"hash_full_prompts": "00fece3fc3f4de3c",
"hash_input_tokens": "6b73c4463988a724",
"hash_cont_tokens": "a7f16a586e1cfe0f"
},
"truncated": 0,
"non_truncated": 4016,
"padded": 15912,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"hashes": {
"hash_examples": "8d3405483d5fdcff",
"hash_full_prompts": "13071966e25b866c",
"hash_input_tokens": "42669e64f9dfb8f2",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"hashes": {
"hash_examples": "031c49a430356414",
"hash_full_prompts": "b56bb1e7116e262d",
"hash_input_tokens": "a78b066120563f71",
"hash_cont_tokens": "5420388845898571"
},
"truncated": 0,
"non_truncated": 8152,
"padded": 32448,
"non_padded": 160,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"hashes": {
"hash_examples": "d0ce2b019a66c1de",
"hash_full_prompts": "2f2cb46ddc7d6890",
"hash_input_tokens": "9b0387ed8198a7b5",
"hash_cont_tokens": "fa4a2c8384dfaaa5"
},
"truncated": 0,
"non_truncated": 198,
"padded": 768,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "7d7c6d476d0576b1",
"hash_full_prompts": "2edbb98d3256db30",
"hash_input_tokens": "b2549cdc000bbeae",
"hash_cont_tokens": "682709d2fa91c75e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 768,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "694d3a01c6144ddb",
"hash_full_prompts": "c87be5bed46f74bb",
"hash_input_tokens": "458c0d25c43f710b",
"hash_cont_tokens": "4f2f97c723cb220f"
},
"truncated": 0,
"non_truncated": 2891,
"padded": 11440,
"non_padded": 124,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"hashes": {
"hash_examples": "004f9c0a40b5ec10",
"hash_full_prompts": "eb9958f57fb1a315",
"hash_input_tokens": "f73c9da82586e658",
"hash_cont_tokens": "8130a825e5a2ee3d"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1072,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "80cf03d462e6ccbc",
"hash_full_prompts": "ced3eb77bcc80c9d",
"hash_input_tokens": "eec9f88ca1e8de5d",
"hash_cont_tokens": "4f6974070ef28d29"
},
"truncated": 0,
"non_truncated": 238,
"padded": 948,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"hashes": {
"hash_examples": "92218def5b383845",
"hash_full_prompts": "7fa02eb03fed5d6d",
"hash_input_tokens": "169808d5c0840165",
"hash_cont_tokens": "5d32bcd7ba8252ba"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"hashes": {
"hash_examples": "323f7848fee32e58",
"hash_full_prompts": "9f5b206cff8b7a65",
"hash_input_tokens": "5235bbbe166d28c6",
"hash_cont_tokens": "1512a6938229952b"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2156,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"hashes": {
"hash_examples": "d7bbe0d037cf31ec",
"hash_full_prompts": "5c1dd8fd44464945",
"hash_input_tokens": "7604208535e138d7",
"hash_cont_tokens": "95cb29e5c31221c8"
},
"truncated": 0,
"non_truncated": 4232,
"padded": 16776,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"hashes": {
"hash_examples": "722ec9207e3b0e04",
"hash_full_prompts": "7cf272ea246b07e8",
"hash_input_tokens": "ebb5fe4a5ce3786b",
"hash_cont_tokens": "0c31c2de1e3429bf"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"hashes": {
"hash_examples": "b5eb675d3b578584",
"hash_full_prompts": "38fcc21b2ee517d0",
"hash_input_tokens": "9afc0d58abefd264",
"hash_cont_tokens": "5e704d9d54138833"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_aging|0": {
"hashes": {
"hash_examples": "713ac79cd2dd2d7b",
"hash_full_prompts": "e4c482e811c5cae4",
"hash_input_tokens": "f85025f22480910a",
"hash_cont_tokens": "e5a3e63957647f04"
},
"truncated": 0,
"non_truncated": 223,
"padded": 868,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"hashes": {
"hash_examples": "47551ab4e5dcf6c5",
"hash_full_prompts": "ebfecc50e59782af",
"hash_input_tokens": "6bcc1e9f3c419603",
"hash_cont_tokens": "90a9b6d1231332f4"
},
"truncated": 0,
"non_truncated": 131,
"padded": 512,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:international_law|0": {
"hashes": {
"hash_examples": "da360336943398d5",
"hash_full_prompts": "0f0f5a324b7fc03f",
"hash_input_tokens": "12d87c935a9710ba",
"hash_cont_tokens": "9ab33ab519d55748"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"hashes": {
"hash_examples": "661d161a486fb035",
"hash_full_prompts": "45ffd4e537d1ecb0",
"hash_input_tokens": "91825a776ea8346c",
"hash_cont_tokens": "2cecb6db3790a23b"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"hashes": {
"hash_examples": "5c3926384758bda7",
"hash_full_prompts": "08766a3dcdafe4a0",
"hash_input_tokens": "cc9da80d59b87d6a",
"hash_cont_tokens": "f5c60e363dd9fc3d"
},
"truncated": 0,
"non_truncated": 163,
"padded": 640,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:machine_learning|0": {
"hashes": {
"hash_examples": "3ce756e6a22ffc48",
"hash_full_prompts": "913cd017b1e491cc",
"hash_input_tokens": "4a08820961800d38",
"hash_cont_tokens": "d41e7e44237c0a16"
},
"truncated": 0,
"non_truncated": 112,
"padded": 444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:management|0": {
"hashes": {
"hash_examples": "20fe769bb3276832",
"hash_full_prompts": "e576a331044c699a",
"hash_input_tokens": "fc2275bcab7861b5",
"hash_cont_tokens": "372864196dbb4cad"
},
"truncated": 0,
"non_truncated": 103,
"padded": 396,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:marketing|0": {
"hashes": {
"hash_examples": "6b19449559d987ce",
"hash_full_prompts": "4ea4e3c330485c1d",
"hash_input_tokens": "c3e3daba0ac3f472",
"hash_cont_tokens": "ad74b6b4e88f6100"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"hashes": {
"hash_examples": "cbb0fa9df0f5435a",
"hash_full_prompts": "21505302fe6bdcd1",
"hash_input_tokens": "c8a7ee5675a5bb7a",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 384,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"hashes": {
"hash_examples": "0a4134046c23cff9",
"hash_full_prompts": "2a7ee3dc1b2a4577",
"hash_input_tokens": "699dc756051bfadc",
"hash_cont_tokens": "f2831dc319b7001c"
},
"truncated": 0,
"non_truncated": 2420,
"padded": 9580,
"non_padded": 100,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"hashes": {
"hash_examples": "1ac8a0967c82caa0",
"hash_full_prompts": "0023386ebe7b5251",
"hash_input_tokens": "e4cf05f28df96426",
"hash_cont_tokens": "d6a32c4f89ec0e43"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"hashes": {
"hash_examples": "2c0670188bc5a789",
"hash_full_prompts": "47e3f8545e3d3c32",
"hash_input_tokens": "9952daff4e011ef4",
"hash_cont_tokens": "0abad6841e9b5dc1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3504,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:nutrition|0": {
"hashes": {
"hash_examples": "658628c0dcdfe201",
"hash_full_prompts": "412f7c0ac82ba72e",
"hash_input_tokens": "7603e517cea1e5f0",
"hash_cont_tokens": "1947ff415070dfa5"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1208,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:philosophy|0": {
"hashes": {
"hash_examples": "8b6707b322affafd",
"hash_full_prompts": "b2722fcc1ba8b040",
"hash_input_tokens": "92612b2ecfc31d25",
"hash_cont_tokens": "566ed263a8423f58"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1224,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:prehistory|0": {
"hashes": {
"hash_examples": "0c85ffcdc9a7b367",
"hash_full_prompts": "ef86135380b03be6",
"hash_input_tokens": "e3cd41d4395b7391",
"hash_cont_tokens": "69725bb3099f23d0"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1272,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"hashes": {
"hash_examples": "cce1ea2d5f544b2f",
"hash_full_prompts": "4d261f28b2e35e1f",
"hash_input_tokens": "bf073f8a6cf46c16",
"hash_cont_tokens": "25802ac32c51a7f7"
},
"truncated": 0,
"non_truncated": 4514,
"padded": 17819,
"non_padded": 237,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_law|0": {
"hashes": {
"hash_examples": "1c654b024b54eb4b",
"hash_full_prompts": "e2024afbd2985ece",
"hash_input_tokens": "95ac84d94dcedc32",
"hash_cont_tokens": "7f2b1b7218a1ef40"
},
"truncated": 0,
"non_truncated": 7987,
"padded": 31596,
"non_padded": 352,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"hashes": {
"hash_examples": "c621eaacfa662ebc",
"hash_full_prompts": "0ce54ee1d51cd659",
"hash_input_tokens": "5ea759ee379e7f00",
"hash_cont_tokens": "e1afe1503a5d02c5"
},
"truncated": 0,
"non_truncated": 1637,
"padded": 6476,
"non_padded": 72,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"hashes": {
"hash_examples": "bc14a28eaec87dc4",
"hash_full_prompts": "a7bd0bdf13bf05ad",
"hash_input_tokens": "02836beea9da2008",
"hash_cont_tokens": "1c41f3eeadeec685"
},
"truncated": 0,
"non_truncated": 3503,
"padded": 13760,
"non_padded": 252,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:public_relations|0": {
"hashes": {
"hash_examples": "de4989d9375885c4",
"hash_full_prompts": "879d5e9e9856e7c5",
"hash_input_tokens": "334ff76fd24a18fc",
"hash_cont_tokens": "3914ab4a5d5b69e8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 432,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:security_studies|0": {
"hashes": {
"hash_examples": "3f84bfeec717c6de",
"hash_full_prompts": "da38cc1f2c39b0a5",
"hash_input_tokens": "83c7a4db5babd791",
"hash_cont_tokens": "1d2b199df736dea4"
},
"truncated": 0,
"non_truncated": 245,
"padded": 972,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:sociology|0": {
"hashes": {
"hash_examples": "10d7c2fae10bfcbc",
"hash_full_prompts": "539888fca87366ec",
"hash_input_tokens": "a1fb5a6cf3ff2e23",
"hash_cont_tokens": "3556cb090eda6dec"
},
"truncated": 0,
"non_truncated": 201,
"padded": 788,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"hashes": {
"hash_examples": "bb05f02c38ddaf1a",
"hash_full_prompts": "49643b9cc74703b0",
"hash_input_tokens": "8d98609b2cfba0bb",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 376,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:virology|0": {
"hashes": {
"hash_examples": "290915a48884ede2",
"hash_full_prompts": "28cb11c95e12eb0d",
"hash_input_tokens": "30eafbb13388b0ab",
"hash_cont_tokens": "cbf93f8f3bd5c82c"
},
"truncated": 0,
"non_truncated": 166,
"padded": 636,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:world_religions|0": {
"hashes": {
"hash_examples": "91cc5451c7284f75",
"hash_full_prompts": "d999068b8d3041e7",
"hash_input_tokens": "e57d2e1f5af5b238",
"hash_cont_tokens": "b5fbc024ac54a858"
},
"truncated": 0,
"non_truncated": 171,
"padded": 672,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "25bf94d05f737b63",
"hash_full_prompts": "070271f4446ca788",
"hash_input_tokens": "68d6879c1a53a4e7",
"hash_cont_tokens": "05d3f2bc980e6cbb"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2394,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "e65fe4df843f4380",
"hash_full_prompts": "a565adf1023880ce",
"hash_input_tokens": "2c1889919c06ca34",
"hash_cont_tokens": "ac1327c8a93a78f2"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1581,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Ethics|0": {
"hashes": {
"hash_examples": "5d32da36271c5eb4",
"hash_full_prompts": "cef59e1280d3cec3",
"hash_input_tokens": "8a370251b3fff368",
"hash_cont_tokens": "67fe5dc315ef723c"
},
"truncated": 0,
"non_truncated": 60,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Illegal|0": {
"hashes": {
"hash_examples": "0c07f1f100f2d0e8",
"hash_full_prompts": "e752cd923d178cad",
"hash_input_tokens": "a1d60f247c61f56a",
"hash_cont_tokens": "2cc82a58b4d87abc"
},
"truncated": 0,
"non_truncated": 53,
"padded": 159,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:MentalHealth|0": {
"hashes": {
"hash_examples": "8e5fc5c4704bd96b",
"hash_full_prompts": "aa24f48cf9143589",
"hash_input_tokens": "f15d52762441e7f2",
"hash_cont_tokens": "7b399d0f0a9124f1"
},
"truncated": 0,
"non_truncated": 76,
"padded": 228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Offensive|0": {
"hashes": {
"hash_examples": "5ad4369b7dc5de46",
"hash_full_prompts": "5d11bea32fd09679",
"hash_input_tokens": "40d9b02fae2509c5",
"hash_cont_tokens": "0cd5015bc3370adf"
},
"truncated": 0,
"non_truncated": 69,
"padded": 207,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:PhysicalHealth|0": {
"hashes": {
"hash_examples": "dc2a632e2dcc86db",
"hash_full_prompts": "0029f010e68a62c6",
"hash_input_tokens": "fd4cc9f2a85a5e4a",
"hash_cont_tokens": "cb8655dcad91858d"
},
"truncated": 0,
"non_truncated": 73,
"padded": 210,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Privacy|0": {
"hashes": {
"hash_examples": "295e35448a39e003",
"hash_full_prompts": "a9a4901ba9341ff3",
"hash_input_tokens": "02ac4bfe0f739798",
"hash_cont_tokens": "7f23416c661e2ee5"
},
"truncated": 0,
"non_truncated": 57,
"padded": 162,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Trustfulness|0": {
"hashes": {
"hash_examples": "e79ac1ea5439e623",
"hash_full_prompts": "a0da4efc7f9ec52f",
"hash_input_tokens": "288cb0153aebf874",
"hash_cont_tokens": "ff874dba360c1ede"
},
"truncated": 0,
"non_truncated": 78,
"padded": 228,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Unfairness|0": {
"hashes": {
"hash_examples": "4ac5dccbfbdc5077",
"hash_full_prompts": "faf22f92faf61ba6",
"hash_input_tokens": "0e7a42b057c2b901",
"hash_cont_tokens": "3e990fe3a474dbc5"
},
"truncated": 0,
"non_truncated": 55,
"padded": 159,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alrage_qa|0": {
"hashes": {
"hash_examples": "3edbbe22cabd4160",
"hash_full_prompts": "7fa5276938208dd2",
"hash_input_tokens": "338c70b76f6963cd",
"hash_cont_tokens": "bc2cc5821300392d"
},
"truncated": 2106,
"non_truncated": 0,
"padded": 2106,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b8b3b49631adcc40",
"hash_full_prompts": "714dd98476cbf2fd",
"hash_input_tokens": "bc76b0162558a688",
"hash_cont_tokens": "5dcdbaafd093fe1c"
},
"truncated": 219,
"non_truncated": 91643,
"padded": 333440,
"non_padded": 3444,
"num_truncated_few_shots": 0
}
}