v2_results/Qwen/Qwen2-0.5B-Instruct/results_2025-01-17T15-33-32.255361.json
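A minimal sketch of fetching and parsing this results file with huggingface_hub. The dataset repo id "OALL/v2_results" is inferred from the page path and is an assumption; the file path and the JSON keys used below come from the file itself.

import json
from huggingface_hub import hf_hub_download

# Download the raw results JSON from the dataset repo.
# Assumption: the dataset repo id is "OALL/v2_results" (inferred from the page path).
path = hf_hub_download(
    repo_id="OALL/v2_results",
    filename="Qwen/Qwen2-0.5B-Instruct/results_2025-01-17T15-33-32.255361.json",
    repo_type="dataset",
)

with open(path) as f:
    results = json.load(f)

# Run metadata lives under "config_general"; per-task scores under "results".
print(results["config_general"]["model_name"])  # Qwen/Qwen2-0.5B-Instruct
print(results["results"]["all"]["acc_norm"])    # overall acc_norm across tasks

The JSON body follows.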
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": 0,
"start_time": 754.703662926,
"end_time": 2864.478399634,
"total_evaluation_time_secondes": "2109.774736708",
"model_name": "Qwen/Qwen2-0.5B-Instruct",
"model_sha": "c540970f9e29518b1d8f06ab8b24cba66ad77b6d",
"model_dtype": "torch.bfloat16",
"model_size": "942.29 MB"
},
"results": {
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.23518850987432674,
"acc_norm_stderr": 0.017986549465809316
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.24263206672845228,
"acc_norm_stderr": 0.005836763336991001
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.26033519553072626,
"acc_norm_stderr": 0.014676252009319482
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.32666666666666666,
"acc_norm_stderr": 0.03842150156165228
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.035934317410871074
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.5592245153220763,
"acc_norm_stderr": 0.00555289862238128
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.3384487072560467,
"acc_norm_stderr": 0.0061118096763272695
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.3546511627906977,
"acc_norm_stderr": 0.011538783415089801
},
"community|arabic_exams|0": {
"acc_norm": 0.2849162011173184,
"acc_norm_stderr": 0.019496409900008914
},
"community|arabic_mmlu:Accounting (University)|0": {
"acc_norm": 0.4594594594594595,
"acc_norm_stderr": 0.05832789513012365
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"acc_norm": 0.33169934640522875,
"acc_norm_stderr": 0.019047485239360378
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"acc_norm": 0.2465753424657534,
"acc_norm_stderr": 0.02259144682513467
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"acc_norm": 0.3282051282051282,
"acc_norm_stderr": 0.023807633198657262
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.0895511888632576
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.02874673063268138
},
"community|arabic_mmlu:Biology (High School)|0": {
"acc_norm": 0.3747338537970192,
"acc_norm_stderr": 0.012900085684381469
},
"community|arabic_mmlu:Civics (High School)|0": {
"acc_norm": 0.28735632183908044,
"acc_norm_stderr": 0.048797477314965754
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"acc_norm": 0.3008474576271186,
"acc_norm_stderr": 0.029917493983288006
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"acc_norm": 0.39846743295019155,
"acc_norm_stderr": 0.030362624913653623
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.09745089103411438
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"acc_norm": 0.5473684210526316,
"acc_norm_stderr": 0.03620607045823047
},
"community|arabic_mmlu:Computer Science (University)|0": {
"acc_norm": 0.359375,
"acc_norm_stderr": 0.060451293443302384
},
"community|arabic_mmlu:Driving Test|0": {
"acc_norm": 0.3740710156895128,
"acc_norm_stderr": 0.013910632443112572
},
"community|arabic_mmlu:Economics (High School)|0": {
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.024433468738193517
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"acc_norm": 0.39080459770114945,
"acc_norm_stderr": 0.052614920082724996
},
"community|arabic_mmlu:Economics (University)|0": {
"acc_norm": 0.40875912408759124,
"acc_norm_stderr": 0.042154748403487055
},
"community|arabic_mmlu:General Knowledge|0": {
"acc_norm": 0.32175925925925924,
"acc_norm_stderr": 0.015902011589237118
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"acc_norm": 0.4186046511627907,
"acc_norm_stderr": 0.037725911890875034
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"acc_norm": 0.41975308641975306,
"acc_norm_stderr": 0.03889470040548675
},
"community|arabic_mmlu:Geography (High School)|0": {
"acc_norm": 0.27842003853564545,
"acc_norm_stderr": 0.013918841793523958
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"acc_norm": 0.35661764705882354,
"acc_norm_stderr": 0.02909720956841195
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.06299407883487118
},
"community|arabic_mmlu:History (High School)|0": {
"acc_norm": 0.3,
"acc_norm_stderr": 0.01663369597826159
},
"community|arabic_mmlu:History (Middle School)|0": {
"acc_norm": 0.3054187192118227,
"acc_norm_stderr": 0.032406615658684086
},
"community|arabic_mmlu:History (Primary School)|0": {
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"community|arabic_mmlu:Islamic Studies|0": {
"acc_norm": 0.2347417840375587,
"acc_norm_stderr": 0.016779880420754928
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"acc_norm": 0.38323353293413176,
"acc_norm_stderr": 0.026642195538092498
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"acc_norm": 0.40756302521008403,
"acc_norm_stderr": 0.03191863374478466
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"acc_norm": 0.47047047047047047,
"acc_norm_stderr": 0.01579959687487564
},
"community|arabic_mmlu:Law (Professional)|0": {
"acc_norm": 0.6656050955414012,
"acc_norm_stderr": 0.02666649741011583
},
"community|arabic_mmlu:Management (University)|0": {
"acc_norm": 0.4266666666666667,
"acc_norm_stderr": 0.05749526681132724
},
"community|arabic_mmlu:Math (Primary School)|0": {
"acc_norm": 0.33007334963325186,
"acc_norm_stderr": 0.023280310913163793
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"acc_norm": 0.3140495867768595,
"acc_norm_stderr": 0.029897643076046507
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.026817776751564906
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.07299934324587597
},
"community|arabic_mmlu:Physics (High School)|0": {
"acc_norm": 0.30196078431372547,
"acc_norm_stderr": 0.02880701939354399
},
"community|arabic_mmlu:Political Science (University)|0": {
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.033144012047664914
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"acc_norm": 0.37344398340248963,
"acc_norm_stderr": 0.031223894073220737
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"acc_norm": 0.4524822695035461,
"acc_norm_stderr": 0.01875916689886046
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"community|arabic_mmlu_ht:anatomy|0": {
"acc_norm": 0.26666666666666666,
"acc_norm_stderr": 0.03820169914517905
},
"community|arabic_mmlu_ht:astronomy|0": {
"acc_norm": 0.24342105263157895,
"acc_norm_stderr": 0.034923496688842384
},
"community|arabic_mmlu_ht:business_ethics|0": {
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708094
},
"community|arabic_mmlu_ht:college_biology|0": {
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03476590104304134
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"community|arabic_mmlu_ht:college_medicine|0": {
"acc_norm": 0.28901734104046245,
"acc_norm_stderr": 0.03456425745087
},
"community|arabic_mmlu_ht:college_physics|0": {
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237654
},
"community|arabic_mmlu_ht:computer_security|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"acc_norm": 0.33617021276595743,
"acc_norm_stderr": 0.030881618520676942
},
"community|arabic_mmlu_ht:econometrics|0": {
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.039994238792813344
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"acc_norm": 0.3448275862068966,
"acc_norm_stderr": 0.03960933549451208
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"acc_norm": 0.291005291005291,
"acc_norm_stderr": 0.023393826500484875
},
"community|arabic_mmlu_ht:formal_logic|0": {
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.0393253768039287
},
"community|arabic_mmlu_ht:global_facts|0": {
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"acc_norm": 0.3047469184369263,
"acc_norm_stderr": 0.00745529231904895
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"acc_norm": 0.3047808764940239,
"acc_norm_stderr": 0.007264603856708956
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"acc_norm": 0.28655544651619236,
"acc_norm_stderr": 0.005008169287360378
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"acc_norm": 0.29292929292929293,
"acc_norm_stderr": 0.032424979581788166
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"acc_norm": 0.24352331606217617,
"acc_norm_stderr": 0.030975436386845415
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"acc_norm": 0.31615358007609823,
"acc_norm_stderr": 0.008649269302921005
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"acc_norm": 0.26296296296296295,
"acc_norm_stderr": 0.02684205787383371
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136098
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.03543304234389985
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"acc_norm": 0.30275229357798167,
"acc_norm_stderr": 0.019698711434756353
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"acc_norm": 0.30151228733459357,
"acc_norm_stderr": 0.007055217069925479
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"acc_norm": 0.35784313725490197,
"acc_norm_stderr": 0.03364487286088299
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"acc_norm": 0.3080168776371308,
"acc_norm_stderr": 0.030052389335605695
},
"community|arabic_mmlu_ht:human_aging|0": {
"acc_norm": 0.3004484304932735,
"acc_norm_stderr": 0.030769352008229136
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"acc_norm": 0.2824427480916031,
"acc_norm_stderr": 0.03948406125768362
},
"community|arabic_mmlu_ht:international_law|0": {
"acc_norm": 0.38016528925619836,
"acc_norm_stderr": 0.04431324501968432
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.046166311118017146
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"community|arabic_mmlu_ht:machine_learning|0": {
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697625
},
"community|arabic_mmlu_ht:management|0": {
"acc_norm": 0.24271844660194175,
"acc_norm_stderr": 0.04245022486384495
},
"community|arabic_mmlu_ht:marketing|0": {
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.03193705726200293
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"acc_norm": 0.31735537190082647,
"acc_norm_stderr": 0.009463511873329576
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"acc_norm": 0.3063583815028902,
"acc_norm_stderr": 0.024818350129436593
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.014333522059217889
},
"community|arabic_mmlu_ht:nutrition|0": {
"acc_norm": 0.33986928104575165,
"acc_norm_stderr": 0.027121956071388852
},
"community|arabic_mmlu_ht:philosophy|0": {
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410622
},
"community|arabic_mmlu_ht:prehistory|0": {
"acc_norm": 0.3117283950617284,
"acc_norm_stderr": 0.025773111169630443
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"acc_norm": 0.2997341603898981,
"acc_norm_stderr": 0.006819725978102662
},
"community|arabic_mmlu_ht:professional_law|0": {
"acc_norm": 0.28608989608113183,
"acc_norm_stderr": 0.005057179529497309
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"acc_norm": 0.3127672571777642,
"acc_norm_stderr": 0.011462277346379139
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"acc_norm": 0.3071652868969455,
"acc_norm_stderr": 0.007795479173971138
},
"community|arabic_mmlu_ht:public_relations|0": {
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.04607582090719976
},
"community|arabic_mmlu_ht:security_studies|0": {
"acc_norm": 0.3142857142857143,
"acc_norm_stderr": 0.029719329422417465
},
"community|arabic_mmlu_ht:sociology|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.03333333333333334
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"acc_norm": 0.43,
"acc_norm_stderr": 0.04975698519562428
},
"community|arabic_mmlu_ht:virology|0": {
"acc_norm": 0.3614457831325301,
"acc_norm_stderr": 0.037400593820293204
},
"community|arabic_mmlu_ht:world_religions|0": {
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.032180937956023566
},
"community|madinah_qa:Arabic Language (General)|0": {
"acc_norm": 0.2973856209150327,
"acc_norm_stderr": 0.018492596536396955
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"acc_norm": 0.2547945205479452,
"acc_norm_stderr": 0.022839277959486398
},
"community|aratrust:Ethics|0": {
"acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.06494964005966064
},
"community|aratrust:Illegal|0": {
"acc_norm": 0.3584905660377358,
"acc_norm_stderr": 0.06650261419090689
},
"community|aratrust:MentalHealth|0": {
"acc_norm": 0.6052631578947368,
"acc_norm_stderr": 0.056441080498755805
},
"community|aratrust:Offensive|0": {
"acc_norm": 0.5942028985507246,
"acc_norm_stderr": 0.05954802978721339
},
"community|aratrust:PhysicalHealth|0": {
"acc_norm": 0.4931506849315068,
"acc_norm_stderr": 0.058920036075634445
},
"community|aratrust:Privacy|0": {
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.06680502724442025
},
"community|aratrust:Trustfulness|0": {
"acc_norm": 0.34615384615384615,
"acc_norm_stderr": 0.05421594160377287
},
"community|aratrust:Unfairness|0": {
"acc_norm": 0.3090909090909091,
"acc_norm_stderr": 0.06288639360110458
},
"community|alghafa:_average|0": {
"acc_norm": 0.34412742490766585,
"acc_norm_stderr": 0.02157068635559298
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.36512759911015624,
"acc_norm_stderr": 0.03563116843944951
},
"community|arabic_mmlu_ht:_average|0": {
"acc_norm": 0.29644786576722093,
"acc_norm_stderr": 0.03094863104492891
},
"community|madinah_qa:_average|0": {
"acc_norm": 0.27609007073148895,
"acc_norm_stderr": 0.020665937247941676
},
"community|aratrust:_average|0": {
"acc_norm": 0.4685571657271693,
"acc_norm_stderr": 0.0612835953826836
},
"all": {
"acc_norm": 0.32510894935676266,
"acc_norm_stderr": 0.031598928153533545,
"f1": 0.3793285897940562,
"f1_stderr": 0.06048137284309489
},
"community|alrage_qa|0": {
"llm_as_judge": 0.3855175688509017,
"llm_as_judge_stderr": 0.000163700073198199
}
},
"versions": {
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:Accounting (University)|0": 0,
"community|arabic_mmlu:Arabic Language (General)|0": 0,
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0,
"community|arabic_mmlu:Arabic Language (High School)|0": 0,
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0,
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0,
"community|arabic_mmlu:Biology (High School)|0": 0,
"community|arabic_mmlu:Civics (High School)|0": 0,
"community|arabic_mmlu:Civics (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (High School)|0": 0,
"community|arabic_mmlu:Computer Science (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (Primary School)|0": 0,
"community|arabic_mmlu:Computer Science (University)|0": 0,
"community|arabic_mmlu:Driving Test|0": 0,
"community|arabic_mmlu:Economics (High School)|0": 0,
"community|arabic_mmlu:Economics (Middle School)|0": 0,
"community|arabic_mmlu:Economics (University)|0": 0,
"community|arabic_mmlu:General Knowledge|0": 0,
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0,
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0,
"community|arabic_mmlu:Geography (High School)|0": 0,
"community|arabic_mmlu:Geography (Middle School)|0": 0,
"community|arabic_mmlu:Geography (Primary School)|0": 0,
"community|arabic_mmlu:History (High School)|0": 0,
"community|arabic_mmlu:History (Middle School)|0": 0,
"community|arabic_mmlu:History (Primary School)|0": 0,
"community|arabic_mmlu:Islamic Studies|0": 0,
"community|arabic_mmlu:Islamic Studies (High School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0,
"community|arabic_mmlu:Law (Professional)|0": 0,
"community|arabic_mmlu:Management (University)|0": 0,
"community|arabic_mmlu:Math (Primary School)|0": 0,
"community|arabic_mmlu:Natural Science (Middle School)|0": 0,
"community|arabic_mmlu:Natural Science (Primary School)|0": 0,
"community|arabic_mmlu:Philosophy (High School)|0": 0,
"community|arabic_mmlu:Physics (High School)|0": 0,
"community|arabic_mmlu:Political Science (University)|0": 0,
"community|arabic_mmlu:Social Science (Middle School)|0": 0,
"community|arabic_mmlu:Social Science (Primary School)|0": 0,
"community|arabic_mmlu_ht:abstract_algebra|0": 0,
"community|arabic_mmlu_ht:anatomy|0": 0,
"community|arabic_mmlu_ht:astronomy|0": 0,
"community|arabic_mmlu_ht:business_ethics|0": 0,
"community|arabic_mmlu_ht:clinical_knowledge|0": 0,
"community|arabic_mmlu_ht:college_biology|0": 0,
"community|arabic_mmlu_ht:college_chemistry|0": 0,
"community|arabic_mmlu_ht:college_computer_science|0": 0,
"community|arabic_mmlu_ht:college_mathematics|0": 0,
"community|arabic_mmlu_ht:college_medicine|0": 0,
"community|arabic_mmlu_ht:college_physics|0": 0,
"community|arabic_mmlu_ht:computer_security|0": 0,
"community|arabic_mmlu_ht:conceptual_physics|0": 0,
"community|arabic_mmlu_ht:econometrics|0": 0,
"community|arabic_mmlu_ht:electrical_engineering|0": 0,
"community|arabic_mmlu_ht:elementary_mathematics|0": 0,
"community|arabic_mmlu_ht:formal_logic|0": 0,
"community|arabic_mmlu_ht:global_facts|0": 0,
"community|arabic_mmlu_ht:high_school_biology|0": 0,
"community|arabic_mmlu_ht:high_school_chemistry|0": 0,
"community|arabic_mmlu_ht:high_school_computer_science|0": 0,
"community|arabic_mmlu_ht:high_school_european_history|0": 0,
"community|arabic_mmlu_ht:high_school_geography|0": 0,
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0,
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_mathematics|0": 0,
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_physics|0": 0,
"community|arabic_mmlu_ht:high_school_psychology|0": 0,
"community|arabic_mmlu_ht:high_school_statistics|0": 0,
"community|arabic_mmlu_ht:high_school_us_history|0": 0,
"community|arabic_mmlu_ht:high_school_world_history|0": 0,
"community|arabic_mmlu_ht:human_aging|0": 0,
"community|arabic_mmlu_ht:human_sexuality|0": 0,
"community|arabic_mmlu_ht:international_law|0": 0,
"community|arabic_mmlu_ht:jurisprudence|0": 0,
"community|arabic_mmlu_ht:logical_fallacies|0": 0,
"community|arabic_mmlu_ht:machine_learning|0": 0,
"community|arabic_mmlu_ht:management|0": 0,
"community|arabic_mmlu_ht:marketing|0": 0,
"community|arabic_mmlu_ht:medical_genetics|0": 0,
"community|arabic_mmlu_ht:miscellaneous|0": 0,
"community|arabic_mmlu_ht:moral_disputes|0": 0,
"community|arabic_mmlu_ht:moral_scenarios|0": 0,
"community|arabic_mmlu_ht:nutrition|0": 0,
"community|arabic_mmlu_ht:philosophy|0": 0,
"community|arabic_mmlu_ht:prehistory|0": 0,
"community|arabic_mmlu_ht:professional_accounting|0": 0,
"community|arabic_mmlu_ht:professional_law|0": 0,
"community|arabic_mmlu_ht:professional_medicine|0": 0,
"community|arabic_mmlu_ht:professional_psychology|0": 0,
"community|arabic_mmlu_ht:public_relations|0": 0,
"community|arabic_mmlu_ht:security_studies|0": 0,
"community|arabic_mmlu_ht:sociology|0": 0,
"community|arabic_mmlu_ht:us_foreign_policy|0": 0,
"community|arabic_mmlu_ht:virology|0": 0,
"community|arabic_mmlu_ht:world_religions|0": 0,
"community|aratrust:Ethics|0": 0,
"community|aratrust:Illegal|0": 0,
"community|aratrust:MentalHealth|0": 0,
"community|aratrust:Offensive|0": 0,
"community|aratrust:PhysicalHealth|0": 0,
"community|aratrust:Privacy|0": 0,
"community|aratrust:Trustfulness|0": 0,
"community|aratrust:Unfairness|0": 0,
"community|madinah_qa:Arabic Language (General)|0": 0,
"community|madinah_qa:Arabic Language (Grammar)|0": 0,
"community|alrage_qa|0": 0
},
"config_tasks": {
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams_pfn",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Accounting (University)": {
"name": "arabic_mmlu:Accounting (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Accounting (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 74,
"effective_num_docs": 74,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (General)": {
"name": "arabic_mmlu:Arabic Language (General)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)": {
"name": "arabic_mmlu:Arabic Language (Grammar)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (High School)": {
"name": "arabic_mmlu:Arabic Language (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)": {
"name": "arabic_mmlu:Arabic Language (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)": {
"name": "arabic_mmlu:Arabic Language (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 252,
"effective_num_docs": 252,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Biology (High School)": {
"name": "arabic_mmlu:Biology (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Biology (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1409,
"effective_num_docs": 1409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (High School)": {
"name": "arabic_mmlu:Civics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (Middle School)": {
"name": "arabic_mmlu:Civics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 236,
"effective_num_docs": 236,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (High School)": {
"name": "arabic_mmlu:Computer Science (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 261,
"effective_num_docs": 261,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Middle School)": {
"name": "arabic_mmlu:Computer Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Primary School)": {
"name": "arabic_mmlu:Computer Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 190,
"effective_num_docs": 190,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (University)": {
"name": "arabic_mmlu:Computer Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 64,
"effective_num_docs": 64,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Driving Test": {
"name": "arabic_mmlu:Driving Test",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Driving Test",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1211,
"effective_num_docs": 1211,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (High School)": {
"name": "arabic_mmlu:Economics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 360,
"effective_num_docs": 360,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (Middle School)": {
"name": "arabic_mmlu:Economics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (University)": {
"name": "arabic_mmlu:Economics (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 137,
"effective_num_docs": 137,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge": {
"name": "arabic_mmlu:General Knowledge",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 864,
"effective_num_docs": 864,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)": {
"name": "arabic_mmlu:General Knowledge (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 172,
"effective_num_docs": 172,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)": {
"name": "arabic_mmlu:General Knowledge (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 162,
"effective_num_docs": 162,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (High School)": {
"name": "arabic_mmlu:Geography (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1038,
"effective_num_docs": 1038,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Middle School)": {
"name": "arabic_mmlu:Geography (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Primary School)": {
"name": "arabic_mmlu:Geography (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (High School)": {
"name": "arabic_mmlu:History (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 760,
"effective_num_docs": 760,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Middle School)": {
"name": "arabic_mmlu:History (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Primary School)": {
"name": "arabic_mmlu:History (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies": {
"name": "arabic_mmlu:Islamic Studies",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 639,
"effective_num_docs": 639,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (High School)": {
"name": "arabic_mmlu:Islamic Studies (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 334,
"effective_num_docs": 334,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)": {
"name": "arabic_mmlu:Islamic Studies (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)": {
"name": "arabic_mmlu:Islamic Studies (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 999,
"effective_num_docs": 999,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Law (Professional)": {
"name": "arabic_mmlu:Law (Professional)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Law (Professional)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 314,
"effective_num_docs": 314,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Management (University)": {
"name": "arabic_mmlu:Management (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Management (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Math (Primary School)": {
"name": "arabic_mmlu:Math (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Math (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 409,
"effective_num_docs": 409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Middle School)": {
"name": "arabic_mmlu:Natural Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 242,
"effective_num_docs": 242,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Primary School)": {
"name": "arabic_mmlu:Natural Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 336,
"effective_num_docs": 336,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Philosophy (High School)": {
"name": "arabic_mmlu:Philosophy (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Philosophy (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 39,
"effective_num_docs": 39,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Physics (High School)": {
"name": "arabic_mmlu:Physics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Physics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Political Science (University)": {
"name": "arabic_mmlu:Political Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Political Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Middle School)": {
"name": "arabic_mmlu:Social Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 241,
"effective_num_docs": 241,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Primary School)": {
"name": "arabic_mmlu:Social Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 705,
"effective_num_docs": 705,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:abstract_algebra": {
"name": "arabic_mmlu_ht:abstract_algebra",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "abstract_algebra",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:anatomy": {
"name": "arabic_mmlu_ht:anatomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "anatomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:astronomy": {
"name": "arabic_mmlu_ht:astronomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "astronomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:business_ethics": {
"name": "arabic_mmlu_ht:business_ethics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "business_ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:clinical_knowledge": {
"name": "arabic_mmlu_ht:clinical_knowledge",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_biology": {
"name": "arabic_mmlu_ht:college_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_chemistry": {
"name": "arabic_mmlu_ht:college_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_computer_science": {
"name": "arabic_mmlu_ht:college_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_mathematics": {
"name": "arabic_mmlu_ht:college_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_medicine": {
"name": "arabic_mmlu_ht:college_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_physics": {
"name": "arabic_mmlu_ht:college_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:computer_security": {
"name": "arabic_mmlu_ht:computer_security",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "computer_security",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:conceptual_physics": {
"name": "arabic_mmlu_ht:conceptual_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "conceptual_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:econometrics": {
"name": "arabic_mmlu_ht:econometrics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "econometrics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:electrical_engineering": {
"name": "arabic_mmlu_ht:electrical_engineering",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "electrical_engineering",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:elementary_mathematics": {
"name": "arabic_mmlu_ht:elementary_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:formal_logic": {
"name": "arabic_mmlu_ht:formal_logic",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "formal_logic",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:global_facts": {
"name": "arabic_mmlu_ht:global_facts",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "global_facts",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_biology": {
"name": "arabic_mmlu_ht:high_school_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3813,
"effective_num_docs": 3813,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_chemistry": {
"name": "arabic_mmlu_ht:high_school_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4016,
"effective_num_docs": 4016,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_computer_science": {
"name": "arabic_mmlu_ht:high_school_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_european_history": {
"name": "arabic_mmlu_ht:high_school_european_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_european_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 8152,
"effective_num_docs": 8152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_geography": {
"name": "arabic_mmlu_ht:high_school_geography",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_geography",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics": {
"name": "arabic_mmlu_ht:high_school_government_and_politics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics": {
"name": "arabic_mmlu_ht:high_school_macroeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2891,
"effective_num_docs": 2891,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_mathematics": {
"name": "arabic_mmlu_ht:high_school_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics": {
"name": "arabic_mmlu_ht:high_school_microeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_physics": {
"name": "arabic_mmlu_ht:high_school_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_psychology": {
"name": "arabic_mmlu_ht:high_school_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_statistics": {
"name": "arabic_mmlu_ht:high_school_statistics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_statistics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4232,
"effective_num_docs": 4232,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_us_history": {
"name": "arabic_mmlu_ht:high_school_us_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_us_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_world_history": {
"name": "arabic_mmlu_ht:high_school_world_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_world_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_aging": {
"name": "arabic_mmlu_ht:human_aging",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_aging",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_sexuality": {
"name": "arabic_mmlu_ht:human_sexuality",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_sexuality",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:international_law": {
"name": "arabic_mmlu_ht:international_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "international_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:jurisprudence": {
"name": "arabic_mmlu_ht:jurisprudence",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "jurisprudence",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:logical_fallacies": {
"name": "arabic_mmlu_ht:logical_fallacies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "logical_fallacies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:machine_learning": {
"name": "arabic_mmlu_ht:machine_learning",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "machine_learning",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:management": {
"name": "arabic_mmlu_ht:management",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "management",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:marketing": {
"name": "arabic_mmlu_ht:marketing",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "marketing",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:medical_genetics": {
"name": "arabic_mmlu_ht:medical_genetics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "medical_genetics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:miscellaneous": {
"name": "arabic_mmlu_ht:miscellaneous",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "miscellaneous",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2420,
"effective_num_docs": 2420,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_disputes": {
"name": "arabic_mmlu_ht:moral_disputes",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_disputes",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_scenarios": {
"name": "arabic_mmlu_ht:moral_scenarios",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_scenarios",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:nutrition": {
"name": "arabic_mmlu_ht:nutrition",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "nutrition",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:philosophy": {
"name": "arabic_mmlu_ht:philosophy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "philosophy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:prehistory": {
"name": "arabic_mmlu_ht:prehistory",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "prehistory",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_accounting": {
"name": "arabic_mmlu_ht:professional_accounting",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_accounting",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4514,
"effective_num_docs": 4514,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_law": {
"name": "arabic_mmlu_ht:professional_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7987,
"effective_num_docs": 7987,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_medicine": {
"name": "arabic_mmlu_ht:professional_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1637,
"effective_num_docs": 1637,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_psychology": {
"name": "arabic_mmlu_ht:professional_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3503,
"effective_num_docs": 3503,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:public_relations": {
"name": "arabic_mmlu_ht:public_relations",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "public_relations",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:security_studies": {
"name": "arabic_mmlu_ht:security_studies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "security_studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:sociology": {
"name": "arabic_mmlu_ht:sociology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "sociology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:us_foreign_policy": {
"name": "arabic_mmlu_ht:us_foreign_policy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:virology": {
"name": "arabic_mmlu_ht:virology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "virology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:world_religions": {
"name": "arabic_mmlu_ht:world_religions",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "world_religions",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Ethics": {
"name": "aratrust:Ethics",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 60,
"effective_num_docs": 60,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Illegal": {
"name": "aratrust:Illegal",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Illegal",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 53,
"effective_num_docs": 53,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:MentalHealth": {
"name": "aratrust:MentalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "MentalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 76,
"effective_num_docs": 76,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Offensive": {
"name": "aratrust:Offensive",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Offensive",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 69,
"effective_num_docs": 69,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:PhysicalHealth": {
"name": "aratrust:PhysicalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "PhysicalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 73,
"effective_num_docs": 73,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Privacy": {
"name": "aratrust:Privacy",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Privacy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Trustfulness": {
"name": "aratrust:Trustfulness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Trustfulness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 78,
"effective_num_docs": 78,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Unfairness": {
"name": "aratrust:Unfairness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Unfairness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 55,
"effective_num_docs": 55,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (General)": {
"name": "madinah_qa:Arabic Language (General)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (Grammar)": {
"name": "madinah_qa:Arabic Language (Grammar)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alrage_qa": {
"name": "alrage_qa",
"prompt_function": "qa_prompt_arabic",
"hf_repo": "OALL/ALRAGE",
"hf_subset": null,
"metric": [
{
"metric_name": "llm_as_judge",
"higher_is_better": true,
"category": "7",
"use_case": "10",
"sample_level_fn": "_sample_level_fn",
"corpus_level_fn": "aggregate_scores"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 200,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2106,
"effective_num_docs": 2106,
"must_remove_duplicate_docs": false,
"version": 0
}
},
"summary_tasks": {
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "80a0e949a438f854",
"hash_input_tokens": "988ea917e82b519c",
"hash_cont_tokens": "d6519f4be64b0b3f"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "6b2d3701471e4b1f",
"hash_input_tokens": "f815a58e26d80be9",
"hash_cont_tokens": "fe549766a0ce738a"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "1773c5ea6c776a5b",
"hash_input_tokens": "5fe4b5d8f9dde2e8",
"hash_cont_tokens": "0349d70fe949b783"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "4e908353c5547baa",
"hash_input_tokens": "774608b4ed9e406c",
"hash_cont_tokens": "65b9e8001c0d9d5e"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "349c75cbe8ae8c36",
"hash_input_tokens": "2f7508ec44525fb9",
"hash_cont_tokens": "707d382e7333be99"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "dc3ef65ebcc67ea0",
"hash_input_tokens": "70fc1c85f7086c73",
"hash_cont_tokens": "b2739963cb832e04"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "d322ffa50c152527",
"hash_input_tokens": "a0cdd8d5f1c54f15",
"hash_cont_tokens": "b5f274f703abc5b7"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "dd56faca891efde6",
"hash_input_tokens": "d460a837ba573265",
"hash_cont_tokens": "87c18aae5cfd812b"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17921,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "a24dad42606fd23e",
"hash_input_tokens": "c1f97cafd2198eeb",
"hash_cont_tokens": "8e03dcc04ffbd0bd"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5096,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "f4c27f32ed1304f6",
"hash_input_tokens": "f8f2eb441131fc1a",
"hash_cont_tokens": "a4ef65bc7bab8dcf"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2096,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Accounting (University)|0": {
"hashes": {
"hash_examples": "30e09697562ff9e7",
"hash_full_prompts": "bff0c6fb19ead57e",
"hash_input_tokens": "cb7801ff839a50c5",
"hash_cont_tokens": "587bf4caea1658f4"
},
"truncated": 0,
"non_truncated": 74,
"padded": 256,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "fc669ccce0759520",
"hash_input_tokens": "b43a56e5c29a9562",
"hash_cont_tokens": "95a234c727b7b43c"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2400,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "10bc873d02a1412a",
"hash_input_tokens": "327c88cc198475e1",
"hash_cont_tokens": "98f874e7446c544b"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1545,
"non_padded": 43,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"hashes": {
"hash_examples": "a9c2cd9a9929292a",
"hash_full_prompts": "6724c0709f54c0c8",
"hash_input_tokens": "603199e2da4fdda9",
"hash_cont_tokens": "3d676d0d2f081e05"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1505,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"hashes": {
"hash_examples": "2f8a77bbbd0e21ff",
"hash_full_prompts": "d217019db346cf56",
"hash_input_tokens": "9c3f21c80f0c3c2e",
"hash_cont_tokens": "322ea7667dfc2c2d"
},
"truncated": 0,
"non_truncated": 27,
"padded": 105,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"hashes": {
"hash_examples": "5eed3da47822539b",
"hash_full_prompts": "281223ef2463441f",
"hash_input_tokens": "352da821ec02cee4",
"hash_cont_tokens": "f3c78f80ddea1519"
},
"truncated": 0,
"non_truncated": 252,
"padded": 918,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Biology (High School)|0": {
"hashes": {
"hash_examples": "91ae6d22a0f0213d",
"hash_full_prompts": "a45c742b98c0bb8e",
"hash_input_tokens": "00f80b657e947bfc",
"hash_cont_tokens": "aaa20fdc3c06d2c3"
},
"truncated": 0,
"non_truncated": 1409,
"padded": 4968,
"non_padded": 88,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (High School)|0": {
"hashes": {
"hash_examples": "f27bf8791bea2bb9",
"hash_full_prompts": "f45fd30a56e6ef8e",
"hash_input_tokens": "61076ba701ef3d58",
"hash_cont_tokens": "e02c7ebfec7f8df8"
},
"truncated": 0,
"non_truncated": 87,
"padded": 312,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"hashes": {
"hash_examples": "74f5bb0098c8916f",
"hash_full_prompts": "2e4ffd2c4ac72585",
"hash_input_tokens": "5b2113db13a1f2f8",
"hash_cont_tokens": "1ffdd9a463183bfa"
},
"truncated": 0,
"non_truncated": 236,
"padded": 940,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"hashes": {
"hash_examples": "a4278d7b525d46fe",
"hash_full_prompts": "d08187d0557a9b3a",
"hash_input_tokens": "df474cd947814596",
"hash_cont_tokens": "821feca3d9004c98"
},
"truncated": 0,
"non_truncated": 261,
"padded": 994,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"hashes": {
"hash_examples": "0cb6c07e4b80dfd4",
"hash_full_prompts": "625cae891db27ba4",
"hash_input_tokens": "62d164742bcf216f",
"hash_cont_tokens": "8b4f299b6f012a83"
},
"truncated": 0,
"non_truncated": 27,
"padded": 100,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"hashes": {
"hash_examples": "d96fc1bc32473533",
"hash_full_prompts": "20d97795e82f33c2",
"hash_input_tokens": "ad9dc30f308e09ab",
"hash_cont_tokens": "1bc67f97b48b9ece"
},
"truncated": 0,
"non_truncated": 190,
"padded": 476,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (University)|0": {
"hashes": {
"hash_examples": "8835587e436cbaff",
"hash_full_prompts": "d382f30efaa2caf2",
"hash_input_tokens": "dda68b52a0f04a9d",
"hash_cont_tokens": "e9d871459bc85f62"
},
"truncated": 0,
"non_truncated": 64,
"padded": 247,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Driving Test|0": {
"hashes": {
"hash_examples": "7a4c38a2c451d075",
"hash_full_prompts": "d9e8596b43112533",
"hash_input_tokens": "62f81d209588f640",
"hash_cont_tokens": "cd411982b0f12d43"
},
"truncated": 0,
"non_truncated": 1211,
"padded": 3606,
"non_padded": 79,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (High School)|0": {
"hashes": {
"hash_examples": "c04c252836601279",
"hash_full_prompts": "dd18a221bd0b756a",
"hash_input_tokens": "217c31d00fb13f3e",
"hash_cont_tokens": "4bda66df90f2d4d8"
},
"truncated": 0,
"non_truncated": 360,
"padded": 1374,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"hashes": {
"hash_examples": "18fba1579406b3cc",
"hash_full_prompts": "bd91e2e91c10dbfb",
"hash_input_tokens": "cb055817a1115b7a",
"hash_cont_tokens": "3ea283b0f50a72f5"
},
"truncated": 0,
"non_truncated": 87,
"padded": 344,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (University)|0": {
"hashes": {
"hash_examples": "7c9e86fba8151562",
"hash_full_prompts": "127e44dd5a0dc334",
"hash_input_tokens": "d68cda9306fcf976",
"hash_cont_tokens": "91cdb256248a5bdf"
},
"truncated": 0,
"non_truncated": 137,
"padded": 532,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge|0": {
"hashes": {
"hash_examples": "acfbe4e1f0314b85",
"hash_full_prompts": "764f51eab95c84fc",
"hash_input_tokens": "bf7dfe8da816b912",
"hash_cont_tokens": "76d704fbedbe5ab8"
},
"truncated": 0,
"non_truncated": 864,
"padded": 3169,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"hashes": {
"hash_examples": "03cd0ecf10224316",
"hash_full_prompts": "37ad2c5bf7870920",
"hash_input_tokens": "91769d1dd913df7a",
"hash_cont_tokens": "aff2aed9268be2e2"
},
"truncated": 0,
"non_truncated": 172,
"padded": 607,
"non_padded": 21,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"hashes": {
"hash_examples": "c3ee30196e05e122",
"hash_full_prompts": "80e56ef0c04ed68c",
"hash_input_tokens": "8f70b17a2fee569f",
"hash_cont_tokens": "6c8978669cdc11fb"
},
"truncated": 0,
"non_truncated": 162,
"padded": 629,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (High School)|0": {
"hashes": {
"hash_examples": "e2e329d2bdd9fb7b",
"hash_full_prompts": "a4eb00d949311057",
"hash_input_tokens": "12b4d28886126555",
"hash_cont_tokens": "37e2e9c548d8c904"
},
"truncated": 0,
"non_truncated": 1038,
"padded": 4052,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"hashes": {
"hash_examples": "420b161444291989",
"hash_full_prompts": "a66bba4fc02faba0",
"hash_input_tokens": "123f45898a27a829",
"hash_cont_tokens": "5e24bb4c8be23901"
},
"truncated": 0,
"non_truncated": 272,
"padded": 966,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"hashes": {
"hash_examples": "5bc5ca48a4210899",
"hash_full_prompts": "cbda4d0ffc67d06b",
"hash_input_tokens": "d569cd67c3772ade",
"hash_cont_tokens": "b9f45957a97d1ecf"
},
"truncated": 0,
"non_truncated": 57,
"padded": 216,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (High School)|0": {
"hashes": {
"hash_examples": "c7cc37f29311bea1",
"hash_full_prompts": "88724da4a11d438f",
"hash_input_tokens": "f1df2ac0d6a438f1",
"hash_cont_tokens": "b16e65544485acae"
},
"truncated": 0,
"non_truncated": 760,
"padded": 2886,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Middle School)|0": {
"hashes": {
"hash_examples": "5b9f1973337153a2",
"hash_full_prompts": "2d98d366c39f32a8",
"hash_input_tokens": "c182977d1d11f918",
"hash_cont_tokens": "e3b355a58a286ee4"
},
"truncated": 0,
"non_truncated": 203,
"padded": 734,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Primary School)|0": {
"hashes": {
"hash_examples": "af2469847007c1fe",
"hash_full_prompts": "df3b23effab097a0",
"hash_input_tokens": "89da2ebc10d8d424",
"hash_cont_tokens": "a52a22630c3cb3f7"
},
"truncated": 0,
"non_truncated": 102,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies|0": {
"hashes": {
"hash_examples": "c8da9b2f16a5ea0f",
"hash_full_prompts": "a8a474da3abfdbdb",
"hash_input_tokens": "4b677fd214caaca0",
"hash_cont_tokens": "1866597a67ff4424"
},
"truncated": 0,
"non_truncated": 639,
"padded": 2493,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"hashes": {
"hash_examples": "efb11bc8ef398117",
"hash_full_prompts": "9165dd4c8e696ae0",
"hash_input_tokens": "4cf6de4862ddd06c",
"hash_cont_tokens": "6b678abb2fd451bd"
},
"truncated": 0,
"non_truncated": 334,
"padded": 1281,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"hashes": {
"hash_examples": "9e33ab030eebdb99",
"hash_full_prompts": "e4fb7447e1b17c38",
"hash_input_tokens": "f38150a86f3b67e9",
"hash_cont_tokens": "e0c922e595ad51cd"
},
"truncated": 0,
"non_truncated": 238,
"padded": 867,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"hashes": {
"hash_examples": "4167565d878b20eb",
"hash_full_prompts": "2bf9da9ac8b969b6",
"hash_input_tokens": "eacff23a804c5481",
"hash_cont_tokens": "97e2182a63c5686c"
},
"truncated": 0,
"non_truncated": 999,
"padded": 2969,
"non_padded": 55,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Law (Professional)|0": {
"hashes": {
"hash_examples": "e77f52c8fe4352b3",
"hash_full_prompts": "35364a07a6ccb7d0",
"hash_input_tokens": "2c1e76376b16daf0",
"hash_cont_tokens": "324cc46c561b417c"
},
"truncated": 0,
"non_truncated": 314,
"padded": 1223,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Management (University)|0": {
"hashes": {
"hash_examples": "09682649b04b7327",
"hash_full_prompts": "1f5a262586618a50",
"hash_input_tokens": "5d7a6224db331bee",
"hash_cont_tokens": "1e98e1e2cd19a5e3"
},
"truncated": 0,
"non_truncated": 75,
"padded": 200,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Math (Primary School)|0": {
"hashes": {
"hash_examples": "edb027bfae7e76f1",
"hash_full_prompts": "f41fb4acbf7affae",
"hash_input_tokens": "1cb504e849eb9f91",
"hash_cont_tokens": "632401a080490684"
},
"truncated": 0,
"non_truncated": 409,
"padded": 1290,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"hashes": {
"hash_examples": "96e72c9094c2364c",
"hash_full_prompts": "d0f723d3ce6ad0de",
"hash_input_tokens": "f4ebef2dee8fce17",
"hash_cont_tokens": "17e42af5dbb9eee1"
},
"truncated": 0,
"non_truncated": 242,
"padded": 924,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"hashes": {
"hash_examples": "69e35bad3dec5a4d",
"hash_full_prompts": "2892d842afda3be6",
"hash_input_tokens": "661bb7446a794af9",
"hash_cont_tokens": "a7423721c9837336"
},
"truncated": 0,
"non_truncated": 336,
"padded": 1206,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"hashes": {
"hash_examples": "dc6ebd484a02fca5",
"hash_full_prompts": "3a82da9c1ad5b47d",
"hash_input_tokens": "2887e480819bd2de",
"hash_cont_tokens": "69b31fc6977897bf"
},
"truncated": 0,
"non_truncated": 39,
"padded": 156,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Physics (High School)|0": {
"hashes": {
"hash_examples": "58a1722472c9e644",
"hash_full_prompts": "085146655d8ef6dc",
"hash_input_tokens": "d0408a46166a6195",
"hash_cont_tokens": "f9506aa86f66954d"
},
"truncated": 0,
"non_truncated": 255,
"padded": 996,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Political Science (University)|0": {
"hashes": {
"hash_examples": "07a4ed6aabbdfd1e",
"hash_full_prompts": "c179a3822417b264",
"hash_input_tokens": "0fbd514d48c026c2",
"hash_cont_tokens": "4799b66f49438465"
},
"truncated": 0,
"non_truncated": 210,
"padded": 688,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"hashes": {
"hash_examples": "8ca955902f304664",
"hash_full_prompts": "7fc55350d4083674",
"hash_input_tokens": "f9571a767e49249e",
"hash_cont_tokens": "4602cb88db99312d"
},
"truncated": 0,
"non_truncated": 241,
"padded": 919,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"hashes": {
"hash_examples": "934025ab3738123c",
"hash_full_prompts": "ad76f074b992e2e0",
"hash_input_tokens": "ef5cb7d63c15c552",
"hash_cont_tokens": "19e973e9f05c9c82"
},
"truncated": 0,
"non_truncated": 705,
"padded": 2004,
"non_padded": 39,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"hashes": {
"hash_examples": "0b557911f2f6d919",
"hash_full_prompts": "e69f74a47413aebb",
"hash_input_tokens": "8eb770d33cf2ea88",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:anatomy|0": {
"hashes": {
"hash_examples": "a552d8a0ef294061",
"hash_full_prompts": "9a06cc1bb7b5b499",
"hash_input_tokens": "5c9baddfaf660ece",
"hash_cont_tokens": "96c000fa61c3bd55"
},
"truncated": 0,
"non_truncated": 135,
"padded": 532,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:astronomy|0": {
"hashes": {
"hash_examples": "c4a372d0af7da098",
"hash_full_prompts": "a94afb4ca0c2d256",
"hash_input_tokens": "579d42c8c34dcfa5",
"hash_cont_tokens": "b13cc32205751d90"
},
"truncated": 0,
"non_truncated": 152,
"padded": 604,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:business_ethics|0": {
"hashes": {
"hash_examples": "9f71d816abf8af7a",
"hash_full_prompts": "34d3001ccccff521",
"hash_input_tokens": "71ab05a7953cb9f2",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"hashes": {
"hash_examples": "38303cd765589ef3",
"hash_full_prompts": "1556725f5b45ac71",
"hash_input_tokens": "7704b8aba5f15da2",
"hash_cont_tokens": "c771582839d4f30c"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1028,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_biology|0": {
"hashes": {
"hash_examples": "dbd9b5d318e60b04",
"hash_full_prompts": "78f4e9cb9660d3ca",
"hash_input_tokens": "c2e25c33e7c3a1f8",
"hash_cont_tokens": "ec774ac0d0ad658b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 572,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"hashes": {
"hash_examples": "6f88491d03db8a4c",
"hash_full_prompts": "2d0102fc2e009e99",
"hash_input_tokens": "a73209098fb12f5b",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"hashes": {
"hash_examples": "ebfdee5ef2ed5e17",
"hash_full_prompts": "aacfd01dc053d008",
"hash_input_tokens": "9e107774ccf848e7",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"hashes": {
"hash_examples": "e3f22cd7712aae2f",
"hash_full_prompts": "86a3d00a93e44a42",
"hash_input_tokens": "3ab18429e0f8d79e",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_medicine|0": {
"hashes": {
"hash_examples": "51a5501373afb5a7",
"hash_full_prompts": "2d611f649df6bd4c",
"hash_input_tokens": "6bab4bd746d0913b",
"hash_cont_tokens": "1823a754e6394181"
},
"truncated": 0,
"non_truncated": 173,
"padded": 680,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_physics|0": {
"hashes": {
"hash_examples": "2d3e015989b108db",
"hash_full_prompts": "8206c1d53c258116",
"hash_input_tokens": "cc114104a0d11b1f",
"hash_cont_tokens": "ee5dc873d27b9e10"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:computer_security|0": {
"hashes": {
"hash_examples": "f8810eddc38dfee4",
"hash_full_prompts": "aa67ba537bd6d9cb",
"hash_input_tokens": "57a6e8a7be9fce1f",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"hashes": {
"hash_examples": "211e32cc43c6b1dc",
"hash_full_prompts": "5b26750c0a54b21a",
"hash_input_tokens": "55e34156f2180f21",
"hash_cont_tokens": "b7b580bbcf7e0afa"
},
"truncated": 0,
"non_truncated": 235,
"padded": 896,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:econometrics|0": {
"hashes": {
"hash_examples": "810023786b2484d2",
"hash_full_prompts": "1554f608d8cbebad",
"hash_input_tokens": "442dca32af0c83e9",
"hash_cont_tokens": "d44932b2a931e093"
},
"truncated": 0,
"non_truncated": 114,
"padded": 452,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"hashes": {
"hash_examples": "a222760c93eaa1ee",
"hash_full_prompts": "69acb104ef4c55c2",
"hash_input_tokens": "73f1fa484eed0c8e",
"hash_cont_tokens": "159f4cb1232d2a3c"
},
"truncated": 0,
"non_truncated": 145,
"padded": 544,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"hashes": {
"hash_examples": "4c069aeee64dc227",
"hash_full_prompts": "f53b58db6fd4b44a",
"hash_input_tokens": "531de409d75643ff",
"hash_cont_tokens": "2bf44b70baf49dfa"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1500,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:formal_logic|0": {
"hashes": {
"hash_examples": "3cb0ccbf8e8a77ae",
"hash_full_prompts": "c4a28d323f6cb522",
"hash_input_tokens": "d6756a6a90e3b537",
"hash_cont_tokens": "8765c45f4711ebb8"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:global_facts|0": {
"hashes": {
"hash_examples": "c1d039e64ea321b9",
"hash_full_prompts": "e5fed902d9227552",
"hash_input_tokens": "5035363329928c7e",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 388,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"hashes": {
"hash_examples": "ddcb8237bb4ba08a",
"hash_full_prompts": "719c7f7a3c6baac4",
"hash_input_tokens": "c6bea9c89c39854a",
"hash_cont_tokens": "49908817551a4513"
},
"truncated": 0,
"non_truncated": 3813,
"padded": 15100,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"hashes": {
"hash_examples": "07061b55c5c436d9",
"hash_full_prompts": "1527546cad57c6e8",
"hash_input_tokens": "32e3d4b83c66cfb3",
"hash_cont_tokens": "a7f16a586e1cfe0f"
},
"truncated": 0,
"non_truncated": 4016,
"padded": 15912,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"hashes": {
"hash_examples": "8d3405483d5fdcff",
"hash_full_prompts": "52bddb525d63cfb6",
"hash_input_tokens": "3ea0b21f87b03c76",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"hashes": {
"hash_examples": "031c49a430356414",
"hash_full_prompts": "a776b57861a8f770",
"hash_input_tokens": "9d348e296a3c292e",
"hash_cont_tokens": "5420388845898571"
},
"truncated": 0,
"non_truncated": 8152,
"padded": 32448,
"non_padded": 160,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"hashes": {
"hash_examples": "d0ce2b019a66c1de",
"hash_full_prompts": "c2feec8be4e1ff30",
"hash_input_tokens": "e57d8eae391ffd72",
"hash_cont_tokens": "fa4a2c8384dfaaa5"
},
"truncated": 0,
"non_truncated": 198,
"padded": 768,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "7d7c6d476d0576b1",
"hash_full_prompts": "a425a9deee8f47b5",
"hash_input_tokens": "342f166873e21e8a",
"hash_cont_tokens": "682709d2fa91c75e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 768,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "694d3a01c6144ddb",
"hash_full_prompts": "ecbc56e8841a440f",
"hash_input_tokens": "ed8c3da0a3b3f261",
"hash_cont_tokens": "4f2f97c723cb220f"
},
"truncated": 0,
"non_truncated": 2891,
"padded": 11440,
"non_padded": 124,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"hashes": {
"hash_examples": "004f9c0a40b5ec10",
"hash_full_prompts": "b062392cb5100ded",
"hash_input_tokens": "1c43569f9032a36a",
"hash_cont_tokens": "8130a825e5a2ee3d"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1072,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "80cf03d462e6ccbc",
"hash_full_prompts": "85d5dfb962fc6d98",
"hash_input_tokens": "4086753e3056c5dd",
"hash_cont_tokens": "4f6974070ef28d29"
},
"truncated": 0,
"non_truncated": 238,
"padded": 948,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"hashes": {
"hash_examples": "92218def5b383845",
"hash_full_prompts": "1c8559c031860760",
"hash_input_tokens": "5010cd1eafb66faf",
"hash_cont_tokens": "5d32bcd7ba8252ba"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"hashes": {
"hash_examples": "323f7848fee32e58",
"hash_full_prompts": "8035a60e22c7171e",
"hash_input_tokens": "90657a781e4e3261",
"hash_cont_tokens": "1512a6938229952b"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2156,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"hashes": {
"hash_examples": "d7bbe0d037cf31ec",
"hash_full_prompts": "6158ad415c6366aa",
"hash_input_tokens": "398c0ae7335b4e99",
"hash_cont_tokens": "95cb29e5c31221c8"
},
"truncated": 0,
"non_truncated": 4232,
"padded": 16776,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"hashes": {
"hash_examples": "722ec9207e3b0e04",
"hash_full_prompts": "5b4434e41ee5ad44",
"hash_input_tokens": "7fc443b784a36a2b",
"hash_cont_tokens": "0c31c2de1e3429bf"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"hashes": {
"hash_examples": "b5eb675d3b578584",
"hash_full_prompts": "9734067c4bba2d90",
"hash_input_tokens": "cb39d47a21b6b1ff",
"hash_cont_tokens": "5e704d9d54138833"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_aging|0": {
"hashes": {
"hash_examples": "713ac79cd2dd2d7b",
"hash_full_prompts": "a7abe635cf733537",
"hash_input_tokens": "f28d5cb964c3d6c1",
"hash_cont_tokens": "e5a3e63957647f04"
},
"truncated": 0,
"non_truncated": 223,
"padded": 868,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"hashes": {
"hash_examples": "47551ab4e5dcf6c5",
"hash_full_prompts": "1f816ff56cc17aa6",
"hash_input_tokens": "32ef9a6c4bda75ec",
"hash_cont_tokens": "90a9b6d1231332f4"
},
"truncated": 0,
"non_truncated": 131,
"padded": 512,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:international_law|0": {
"hashes": {
"hash_examples": "da360336943398d5",
"hash_full_prompts": "fb06b1e1341ef7fd",
"hash_input_tokens": "00e18b7cc18df78c",
"hash_cont_tokens": "9ab33ab519d55748"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"hashes": {
"hash_examples": "661d161a486fb035",
"hash_full_prompts": "c1f348bac6d3e718",
"hash_input_tokens": "d7d4df070d09807e",
"hash_cont_tokens": "2cecb6db3790a23b"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"hashes": {
"hash_examples": "5c3926384758bda7",
"hash_full_prompts": "0cde216cf861d61f",
"hash_input_tokens": "477d683f3a34ab49",
"hash_cont_tokens": "f5c60e363dd9fc3d"
},
"truncated": 0,
"non_truncated": 163,
"padded": 640,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:machine_learning|0": {
"hashes": {
"hash_examples": "3ce756e6a22ffc48",
"hash_full_prompts": "2e571d1b5320c81f",
"hash_input_tokens": "ca99ea7495acfc66",
"hash_cont_tokens": "d41e7e44237c0a16"
},
"truncated": 0,
"non_truncated": 112,
"padded": 444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:management|0": {
"hashes": {
"hash_examples": "20fe769bb3276832",
"hash_full_prompts": "addddee958040e66",
"hash_input_tokens": "7b8702d437bc6901",
"hash_cont_tokens": "372864196dbb4cad"
},
"truncated": 0,
"non_truncated": 103,
"padded": 396,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:marketing|0": {
"hashes": {
"hash_examples": "6b19449559d987ce",
"hash_full_prompts": "fcc1001dcd8658fe",
"hash_input_tokens": "a8d4c75abf86f719",
"hash_cont_tokens": "ad74b6b4e88f6100"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"hashes": {
"hash_examples": "cbb0fa9df0f5435a",
"hash_full_prompts": "b9d85ecf87796575",
"hash_input_tokens": "5cecc6f873ef619c",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 384,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"hashes": {
"hash_examples": "0a4134046c23cff9",
"hash_full_prompts": "d46469b4de083b50",
"hash_input_tokens": "9f9cb61cd9a789eb",
"hash_cont_tokens": "f2831dc319b7001c"
},
"truncated": 0,
"non_truncated": 2420,
"padded": 9580,
"non_padded": 100,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"hashes": {
"hash_examples": "1ac8a0967c82caa0",
"hash_full_prompts": "7ccf1d3ae4eab339",
"hash_input_tokens": "89493f1c4e8306bd",
"hash_cont_tokens": "d6a32c4f89ec0e43"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"hashes": {
"hash_examples": "2c0670188bc5a789",
"hash_full_prompts": "44466154f6fbec71",
"hash_input_tokens": "f60cd1df7f66cd7a",
"hash_cont_tokens": "0abad6841e9b5dc1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3504,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:nutrition|0": {
"hashes": {
"hash_examples": "658628c0dcdfe201",
"hash_full_prompts": "2ebbbdaea80d9c0f",
"hash_input_tokens": "f0ec440c8774ee18",
"hash_cont_tokens": "1947ff415070dfa5"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1208,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:philosophy|0": {
"hashes": {
"hash_examples": "8b6707b322affafd",
"hash_full_prompts": "72b1804dd62e9842",
"hash_input_tokens": "09792b5775274599",
"hash_cont_tokens": "566ed263a8423f58"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1224,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:prehistory|0": {
"hashes": {
"hash_examples": "0c85ffcdc9a7b367",
"hash_full_prompts": "70d2d35428e8ed51",
"hash_input_tokens": "e6d344770642264d",
"hash_cont_tokens": "69725bb3099f23d0"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1272,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"hashes": {
"hash_examples": "cce1ea2d5f544b2f",
"hash_full_prompts": "1bef23608081ff7b",
"hash_input_tokens": "cee38ca136d19009",
"hash_cont_tokens": "25802ac32c51a7f7"
},
"truncated": 0,
"non_truncated": 4514,
"padded": 17819,
"non_padded": 237,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_law|0": {
"hashes": {
"hash_examples": "1c654b024b54eb4b",
"hash_full_prompts": "6525f44784846cab",
"hash_input_tokens": "0a5b168437f89e8c",
"hash_cont_tokens": "7f2b1b7218a1ef40"
},
"truncated": 0,
"non_truncated": 7987,
"padded": 31596,
"non_padded": 352,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"hashes": {
"hash_examples": "c621eaacfa662ebc",
"hash_full_prompts": "7875238389eeca83",
"hash_input_tokens": "546c47dbdd0e550e",
"hash_cont_tokens": "e1afe1503a5d02c5"
},
"truncated": 0,
"non_truncated": 1637,
"padded": 6476,
"non_padded": 72,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"hashes": {
"hash_examples": "bc14a28eaec87dc4",
"hash_full_prompts": "3e80ba9be5ad5629",
"hash_input_tokens": "3c4e1d4ff9109089",
"hash_cont_tokens": "1c41f3eeadeec685"
},
"truncated": 0,
"non_truncated": 3503,
"padded": 13760,
"non_padded": 252,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:public_relations|0": {
"hashes": {
"hash_examples": "de4989d9375885c4",
"hash_full_prompts": "79e175750959cd9e",
"hash_input_tokens": "be94c578c81f1ead",
"hash_cont_tokens": "3914ab4a5d5b69e8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 432,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:security_studies|0": {
"hashes": {
"hash_examples": "3f84bfeec717c6de",
"hash_full_prompts": "5bf146758e2e0263",
"hash_input_tokens": "71fea4ac2c6edca6",
"hash_cont_tokens": "1d2b199df736dea4"
},
"truncated": 0,
"non_truncated": 245,
"padded": 972,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:sociology|0": {
"hashes": {
"hash_examples": "10d7c2fae10bfcbc",
"hash_full_prompts": "fa261fced60e82fc",
"hash_input_tokens": "ed6086c1da753b7d",
"hash_cont_tokens": "3556cb090eda6dec"
},
"truncated": 0,
"non_truncated": 201,
"padded": 788,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"hashes": {
"hash_examples": "bb05f02c38ddaf1a",
"hash_full_prompts": "0dfe513238bd0061",
"hash_input_tokens": "cedca948696026ec",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 376,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:virology|0": {
"hashes": {
"hash_examples": "290915a48884ede2",
"hash_full_prompts": "694770ed73f5c26a",
"hash_input_tokens": "6b0e773958a18bf5",
"hash_cont_tokens": "cbf93f8f3bd5c82c"
},
"truncated": 0,
"non_truncated": 166,
"padded": 636,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:world_religions|0": {
"hashes": {
"hash_examples": "91cc5451c7284f75",
"hash_full_prompts": "71844a79ee0d8a3d",
"hash_input_tokens": "33d85cc0318b81b5",
"hash_cont_tokens": "b5fbc024ac54a858"
},
"truncated": 0,
"non_truncated": 171,
"padded": 672,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "fc669ccce0759520",
"hash_input_tokens": "84cf84610a88790d",
"hash_cont_tokens": "05d3f2bc980e6cbb"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2346,
"non_padded": 57,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "10bc873d02a1412a",
"hash_input_tokens": "45e5a32c7838fd96",
"hash_cont_tokens": "ac1327c8a93a78f2"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1521,
"non_padded": 67,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Ethics|0": {
"hashes": {
"hash_examples": "5d32da36271c5eb4",
"hash_full_prompts": "bf216386871f979e",
"hash_input_tokens": "ca3048884e5363dd",
"hash_cont_tokens": "67fe5dc315ef723c"
},
"truncated": 0,
"non_truncated": 60,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Illegal|0": {
"hashes": {
"hash_examples": "0c07f1f100f2d0e8",
"hash_full_prompts": "e8685254a9800856",
"hash_input_tokens": "6584a5439057a846",
"hash_cont_tokens": "2cc82a58b4d87abc"
},
"truncated": 0,
"non_truncated": 53,
"padded": 159,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:MentalHealth|0": {
"hashes": {
"hash_examples": "8e5fc5c4704bd96b",
"hash_full_prompts": "7bda37378742ecfe",
"hash_input_tokens": "c1dd22d5d2e668ce",
"hash_cont_tokens": "7b399d0f0a9124f1"
},
"truncated": 0,
"non_truncated": 76,
"padded": 228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Offensive|0": {
"hashes": {
"hash_examples": "5ad4369b7dc5de46",
"hash_full_prompts": "5fb2efd23e5ac881",
"hash_input_tokens": "66da6c3d5844dffd",
"hash_cont_tokens": "0cd5015bc3370adf"
},
"truncated": 0,
"non_truncated": 69,
"padded": 207,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:PhysicalHealth|0": {
"hashes": {
"hash_examples": "dc2a632e2dcc86db",
"hash_full_prompts": "bf54e2b8d0999424",
"hash_input_tokens": "868b92d974ebd93d",
"hash_cont_tokens": "cb8655dcad91858d"
},
"truncated": 0,
"non_truncated": 73,
"padded": 210,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Privacy|0": {
"hashes": {
"hash_examples": "295e35448a39e003",
"hash_full_prompts": "e7c7f61380b9f5b3",
"hash_input_tokens": "8e4ed2404fbe3efd",
"hash_cont_tokens": "7f23416c661e2ee5"
},
"truncated": 0,
"non_truncated": 57,
"padded": 162,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Trustfulness|0": {
"hashes": {
"hash_examples": "e79ac1ea5439e623",
"hash_full_prompts": "d3bde62ee380527f",
"hash_input_tokens": "eaa0417e35f3b7a0",
"hash_cont_tokens": "ff874dba360c1ede"
},
"truncated": 0,
"non_truncated": 78,
"padded": 228,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Unfairness|0": {
"hashes": {
"hash_examples": "4ac5dccbfbdc5077",
"hash_full_prompts": "88e12633a1dbe81d",
"hash_input_tokens": "e5268e7139fe3ca8",
"hash_cont_tokens": "3e990fe3a474dbc5"
},
"truncated": 0,
"non_truncated": 55,
"padded": 159,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alrage_qa|0": {
"hashes": {
"hash_examples": "3edbbe22cabd4160",
"hash_full_prompts": "91c8d5ed9f9796ea",
"hash_input_tokens": "dbccd73d9313a2b1",
"hash_cont_tokens": "e908135d39af80e1"
},
"truncated": 2106,
"non_truncated": 0,
"padded": 2106,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b8b3b49631adcc40",
"hash_full_prompts": "a49e21e87f93468f",
"hash_input_tokens": "f8010e6a122010bf",
"hash_cont_tokens": "0743c809690f391a"
},
"truncated": 219,
"non_truncated": 91643,
"padded": 333440,
"non_padded": 3444,
"num_truncated_few_shots": 0
}
}