Datasets:
OALL
/

Modalities:
Text
Formats:
json
Size:
< 1K
Libraries:
Datasets
Dask
v2_results / inceptionai / jais-adapted-70b / results_2025-01-23T14-14-09.083151.json
amztheory (uploader)
Upload inceptionai/jais-adapted-70b/results_2025-01-23T14-14-09.083151.json with huggingface_hub
8e93229 verified
raw
history blame
246 kB
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": 0,
"start_time": 729.564474764,
"end_time": 73295.384408033,
"total_evaluation_time_secondes": "72565.819933269",
"model_name": "inceptionai/jais-adapted-70b",
"model_sha": "ea7672c31df9ed1301831732d8c37b85d84847e4",
"model_dtype": "torch.float16",
"model_size": "129.46 GB"
},
"results": {
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.4254937163375224,
"acc_norm_stderr": 0.020967978914021683
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.353660797034291,
"acc_norm_stderr": 0.0065098088858809855
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.40893854748603353,
"acc_norm_stderr": 0.01644283065471554
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.54,
"acc_norm_stderr": 0.040830308521485996
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.44,
"acc_norm_stderr": 0.040665603096078466
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.8391494684177611,
"acc_norm_stderr": 0.004109120150384934
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.5688073394495413,
"acc_norm_stderr": 0.0063967577868139286
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.37034883720930234,
"acc_norm_stderr": 0.011647093428523898
},
"community|arabic_exams|0": {
"acc_norm": 0.43575418994413406,
"acc_norm_stderr": 0.02141768675103408
},
"community|arabic_mmlu:Accounting (University)|0": {
"acc_norm": 0.4189189189189189,
"acc_norm_stderr": 0.057746002446083286
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"acc_norm": 0.5130718954248366,
"acc_norm_stderr": 0.020220920829626923
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"acc_norm": 0.27671232876712326,
"acc_norm_stderr": 0.02344871747678411
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"acc_norm": 0.3230769230769231,
"acc_norm_stderr": 0.02371088850197057
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.09745089103411436
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"acc_norm": 0.5634920634920635,
"acc_norm_stderr": 0.031304236305194744
},
"community|arabic_mmlu:Biology (High School)|0": {
"acc_norm": 0.40809084457061745,
"acc_norm_stderr": 0.013097989174594562
},
"community|arabic_mmlu:Civics (High School)|0": {
"acc_norm": 0.4482758620689655,
"acc_norm_stderr": 0.05362711627041053
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"acc_norm": 0.3855932203389831,
"acc_norm_stderr": 0.031751099077481804
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"acc_norm": 0.4521072796934866,
"acc_norm_stderr": 0.030866105840801242
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.0895511888632576
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"acc_norm": 0.5947368421052631,
"acc_norm_stderr": 0.03571084126496387
},
"community|arabic_mmlu:Computer Science (University)|0": {
"acc_norm": 0.59375,
"acc_norm_stderr": 0.061876853828249374
},
"community|arabic_mmlu:Driving Test|0": {
"acc_norm": 0.6366639141205616,
"acc_norm_stderr": 0.013826641387094463
},
"community|arabic_mmlu:Economics (High School)|0": {
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.026382474093672136
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"acc_norm": 0.5977011494252874,
"acc_norm_stderr": 0.05287704973221805
},
"community|arabic_mmlu:Economics (University)|0": {
"acc_norm": 0.5255474452554745,
"acc_norm_stderr": 0.04281864355155348
},
"community|arabic_mmlu:General Knowledge|0": {
"acc_norm": 0.5914351851851852,
"acc_norm_stderr": 0.016733186920240433
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"acc_norm": 0.5523255813953488,
"acc_norm_stderr": 0.038026001686722086
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"acc_norm": 0.6049382716049383,
"acc_norm_stderr": 0.038527876424962665
},
"community|arabic_mmlu:Geography (High School)|0": {
"acc_norm": 0.43641618497109824,
"acc_norm_stderr": 0.01540069437207651
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"acc_norm": 0.5183823529411765,
"acc_norm_stderr": 0.03035230339535197
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"acc_norm": 0.631578947368421,
"acc_norm_stderr": 0.06446025638903097
},
"community|arabic_mmlu:History (High School)|0": {
"acc_norm": 0.38552631578947366,
"acc_norm_stderr": 0.01766679502364449
},
"community|arabic_mmlu:History (Middle School)|0": {
"acc_norm": 0.5270935960591133,
"acc_norm_stderr": 0.03512819077876106
},
"community|arabic_mmlu:History (Primary School)|0": {
"acc_norm": 0.5490196078431373,
"acc_norm_stderr": 0.049512182523962625
},
"community|arabic_mmlu:Islamic Studies|0": {
"acc_norm": 0.5211267605633803,
"acc_norm_stderr": 0.019777510897112962
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"acc_norm": 0.5538922155688623,
"acc_norm_stderr": 0.027240207712487363
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"acc_norm": 0.5840336134453782,
"acc_norm_stderr": 0.03201650100739611
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"acc_norm": 0.6626626626626627,
"acc_norm_stderr": 0.0149662565742802
},
"community|arabic_mmlu:Law (Professional)|0": {
"acc_norm": 0.5445859872611465,
"acc_norm_stderr": 0.02814908335519747
},
"community|arabic_mmlu:Management (University)|0": {
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.05479966243511907
},
"community|arabic_mmlu:Math (Primary School)|0": {
"acc_norm": 0.48655256723716384,
"acc_norm_stderr": 0.02474473436519647
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"acc_norm": 0.6074380165289256,
"acc_norm_stderr": 0.03145549926551581
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"acc_norm": 0.6488095238095238,
"acc_norm_stderr": 0.026079998948332458
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"acc_norm": 0.41025641025641024,
"acc_norm_stderr": 0.0797934979708204
},
"community|arabic_mmlu:Physics (High School)|0": {
"acc_norm": 0.34901960784313724,
"acc_norm_stderr": 0.029908319306125596
},
"community|arabic_mmlu:Political Science (University)|0": {
"acc_norm": 0.5476190476190477,
"acc_norm_stderr": 0.03442851454672489
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"acc_norm": 0.44813278008298757,
"acc_norm_stderr": 0.032100739315089555
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"acc_norm": 0.5858156028368794,
"acc_norm_stderr": 0.01856483120920678
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"community|arabic_mmlu_ht:anatomy|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.043097329010363554
},
"community|arabic_mmlu_ht:astronomy|0": {
"acc_norm": 0.48026315789473684,
"acc_norm_stderr": 0.040657710025626036
},
"community|arabic_mmlu_ht:business_ethics|0": {
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"acc_norm": 0.5132075471698113,
"acc_norm_stderr": 0.030762134874500476
},
"community|arabic_mmlu_ht:college_biology|0": {
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.04166666666666665
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"community|arabic_mmlu_ht:college_medicine|0": {
"acc_norm": 0.45664739884393063,
"acc_norm_stderr": 0.03798106566014498
},
"community|arabic_mmlu_ht:college_physics|0": {
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.04724007352383888
},
"community|arabic_mmlu_ht:computer_security|0": {
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.032321469162244675
},
"community|arabic_mmlu_ht:econometrics|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04434600701584925
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"acc_norm": 0.46206896551724136,
"acc_norm_stderr": 0.041546596717075474
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.02345603738398203
},
"community|arabic_mmlu_ht:formal_logic|0": {
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.04190596438871136
},
"community|arabic_mmlu_ht:global_facts|0": {
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"acc_norm": 0.5245213742460005,
"acc_norm_stderr": 0.008088549518744343
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"acc_norm": 0.5119521912350598,
"acc_norm_stderr": 0.007888657701404784
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"acc_norm": 0.43940137389597644,
"acc_norm_stderr": 0.0054973232298496785
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03521224908841585
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"acc_norm": 0.6476683937823834,
"acc_norm_stderr": 0.03447478286414358
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"acc_norm": 0.5268073331027326,
"acc_norm_stderr": 0.009287439277664436
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.02564410863926763
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"acc_norm": 0.41596638655462187,
"acc_norm_stderr": 0.03201650100739615
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.03822746937658754
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"acc_norm": 0.6256880733944954,
"acc_norm_stderr": 0.02074895940898832
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"acc_norm": 0.502835538752363,
"acc_norm_stderr": 0.007686727899440979
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.03454236585380609
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"acc_norm": 0.6118143459915611,
"acc_norm_stderr": 0.03172295004332328
},
"community|arabic_mmlu_ht:human_aging|0": {
"acc_norm": 0.5246636771300448,
"acc_norm_stderr": 0.03351695167652628
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"community|arabic_mmlu_ht:international_law|0": {
"acc_norm": 0.6859504132231405,
"acc_norm_stderr": 0.042369647530410184
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"acc_norm": 0.5277777777777778,
"acc_norm_stderr": 0.04826217294139894
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"acc_norm": 0.5030674846625767,
"acc_norm_stderr": 0.03928297078179663
},
"community|arabic_mmlu_ht:machine_learning|0": {
"acc_norm": 0.36607142857142855,
"acc_norm_stderr": 0.0457237235873743
},
"community|arabic_mmlu_ht:management|0": {
"acc_norm": 0.5436893203883495,
"acc_norm_stderr": 0.049318019942204146
},
"community|arabic_mmlu_ht:marketing|0": {
"acc_norm": 0.6709401709401709,
"acc_norm_stderr": 0.03078232157768817
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"acc_norm": 0.5376033057851239,
"acc_norm_stderr": 0.010137255471213535
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"acc_norm": 0.5144508670520231,
"acc_norm_stderr": 0.026907849856282542
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"community|arabic_mmlu_ht:nutrition|0": {
"acc_norm": 0.545751633986928,
"acc_norm_stderr": 0.028509807802626595
},
"community|arabic_mmlu_ht:philosophy|0": {
"acc_norm": 0.5884244372990354,
"acc_norm_stderr": 0.02795048149440127
},
"community|arabic_mmlu_ht:prehistory|0": {
"acc_norm": 0.5216049382716049,
"acc_norm_stderr": 0.02779476010500874
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"acc_norm": 0.49778466991581743,
"acc_norm_stderr": 0.007442743888244289
},
"community|arabic_mmlu_ht:professional_law|0": {
"acc_norm": 0.4374608739201202,
"acc_norm_stderr": 0.0055511290313154955
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"acc_norm": 0.5186316432498472,
"acc_norm_stderr": 0.01235311899158185
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"acc_norm": 0.5221238938053098,
"acc_norm_stderr": 0.008440853668770152
},
"community|arabic_mmlu_ht:public_relations|0": {
"acc_norm": 0.4636363636363636,
"acc_norm_stderr": 0.047764491623961985
},
"community|arabic_mmlu_ht:security_studies|0": {
"acc_norm": 0.5020408163265306,
"acc_norm_stderr": 0.0320089533497105
},
"community|arabic_mmlu_ht:sociology|0": {
"acc_norm": 0.6716417910447762,
"acc_norm_stderr": 0.033206858897443244
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542129
},
"community|arabic_mmlu_ht:virology|0": {
"acc_norm": 0.42771084337349397,
"acc_norm_stderr": 0.03851597683718534
},
"community|arabic_mmlu_ht:world_religions|0": {
"acc_norm": 0.5906432748538012,
"acc_norm_stderr": 0.03771283107626544
},
"community|madinah_qa:Arabic Language (General)|0": {
"acc_norm": 0.46405228758169936,
"acc_norm_stderr": 0.020175488765484043
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"acc_norm": 0.336986301369863,
"acc_norm_stderr": 0.024775172239613903
},
"community|aratrust:Ethics|0": {
"acc_norm": 0.6333333333333333,
"acc_norm_stderr": 0.06273730842308473
},
"community|aratrust:Illegal|0": {
"acc_norm": 0.6981132075471698,
"acc_norm_stderr": 0.06366244470090368
},
"community|aratrust:MentalHealth|0": {
"acc_norm": 0.8026315789473685,
"acc_norm_stderr": 0.045958550517297296
},
"community|aratrust:Offensive|0": {
"acc_norm": 0.8115942028985508,
"acc_norm_stderr": 0.04742006474057419
},
"community|aratrust:PhysicalHealth|0": {
"acc_norm": 0.7671232876712328,
"acc_norm_stderr": 0.04981147084308546
},
"community|aratrust:Privacy|0": {
"acc_norm": 0.8596491228070176,
"acc_norm_stderr": 0.04641668966779981
},
"community|aratrust:Trustfulness|0": {
"acc_norm": 0.6794871794871795,
"acc_norm_stderr": 0.053182405107174306
},
"community|aratrust:Unfairness|0": {
"acc_norm": 0.7454545454545455,
"acc_norm_stderr": 0.059278386873217015
},
"community|alghafa:_average|0": {
"acc_norm": 0.4962665228816058,
"acc_norm_stderr": 0.022849644793311193
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.51970258074589,
"acc_norm_stderr": 0.03665251260253573
},
"community|arabic_mmlu_ht:_average|0": {
"acc_norm": 0.4799893688175857,
"acc_norm_stderr": 0.03308570309142766
},
"community|madinah_qa:_average|0": {
"acc_norm": 0.40051929447578116,
"acc_norm_stderr": 0.022475330502548973
},
"community|aratrust:_average|0": {
"acc_norm": 0.7496733072682997,
"acc_norm_stderr": 0.05355841510914207
},
"all": {
"acc_norm": 0.49404301593823985,
"acc_norm_stderr": 0.0332477131303554,
"f1": 0.7053860411193431,
"f1_stderr": 0.05588622012932623
},
"community|alrage_qa|0": {
"llm_as_judge": 0.5528490028490017,
"llm_as_judge_stderr": 0.00023093948894035965
}
},
"versions": {
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:Accounting (University)|0": 0,
"community|arabic_mmlu:Arabic Language (General)|0": 0,
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0,
"community|arabic_mmlu:Arabic Language (High School)|0": 0,
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0,
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0,
"community|arabic_mmlu:Biology (High School)|0": 0,
"community|arabic_mmlu:Civics (High School)|0": 0,
"community|arabic_mmlu:Civics (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (High School)|0": 0,
"community|arabic_mmlu:Computer Science (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (Primary School)|0": 0,
"community|arabic_mmlu:Computer Science (University)|0": 0,
"community|arabic_mmlu:Driving Test|0": 0,
"community|arabic_mmlu:Economics (High School)|0": 0,
"community|arabic_mmlu:Economics (Middle School)|0": 0,
"community|arabic_mmlu:Economics (University)|0": 0,
"community|arabic_mmlu:General Knowledge|0": 0,
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0,
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0,
"community|arabic_mmlu:Geography (High School)|0": 0,
"community|arabic_mmlu:Geography (Middle School)|0": 0,
"community|arabic_mmlu:Geography (Primary School)|0": 0,
"community|arabic_mmlu:History (High School)|0": 0,
"community|arabic_mmlu:History (Middle School)|0": 0,
"community|arabic_mmlu:History (Primary School)|0": 0,
"community|arabic_mmlu:Islamic Studies|0": 0,
"community|arabic_mmlu:Islamic Studies (High School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0,
"community|arabic_mmlu:Law (Professional)|0": 0,
"community|arabic_mmlu:Management (University)|0": 0,
"community|arabic_mmlu:Math (Primary School)|0": 0,
"community|arabic_mmlu:Natural Science (Middle School)|0": 0,
"community|arabic_mmlu:Natural Science (Primary School)|0": 0,
"community|arabic_mmlu:Philosophy (High School)|0": 0,
"community|arabic_mmlu:Physics (High School)|0": 0,
"community|arabic_mmlu:Political Science (University)|0": 0,
"community|arabic_mmlu:Social Science (Middle School)|0": 0,
"community|arabic_mmlu:Social Science (Primary School)|0": 0,
"community|arabic_mmlu_ht:abstract_algebra|0": 0,
"community|arabic_mmlu_ht:anatomy|0": 0,
"community|arabic_mmlu_ht:astronomy|0": 0,
"community|arabic_mmlu_ht:business_ethics|0": 0,
"community|arabic_mmlu_ht:clinical_knowledge|0": 0,
"community|arabic_mmlu_ht:college_biology|0": 0,
"community|arabic_mmlu_ht:college_chemistry|0": 0,
"community|arabic_mmlu_ht:college_computer_science|0": 0,
"community|arabic_mmlu_ht:college_mathematics|0": 0,
"community|arabic_mmlu_ht:college_medicine|0": 0,
"community|arabic_mmlu_ht:college_physics|0": 0,
"community|arabic_mmlu_ht:computer_security|0": 0,
"community|arabic_mmlu_ht:conceptual_physics|0": 0,
"community|arabic_mmlu_ht:econometrics|0": 0,
"community|arabic_mmlu_ht:electrical_engineering|0": 0,
"community|arabic_mmlu_ht:elementary_mathematics|0": 0,
"community|arabic_mmlu_ht:formal_logic|0": 0,
"community|arabic_mmlu_ht:global_facts|0": 0,
"community|arabic_mmlu_ht:high_school_biology|0": 0,
"community|arabic_mmlu_ht:high_school_chemistry|0": 0,
"community|arabic_mmlu_ht:high_school_computer_science|0": 0,
"community|arabic_mmlu_ht:high_school_european_history|0": 0,
"community|arabic_mmlu_ht:high_school_geography|0": 0,
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0,
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_mathematics|0": 0,
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_physics|0": 0,
"community|arabic_mmlu_ht:high_school_psychology|0": 0,
"community|arabic_mmlu_ht:high_school_statistics|0": 0,
"community|arabic_mmlu_ht:high_school_us_history|0": 0,
"community|arabic_mmlu_ht:high_school_world_history|0": 0,
"community|arabic_mmlu_ht:human_aging|0": 0,
"community|arabic_mmlu_ht:human_sexuality|0": 0,
"community|arabic_mmlu_ht:international_law|0": 0,
"community|arabic_mmlu_ht:jurisprudence|0": 0,
"community|arabic_mmlu_ht:logical_fallacies|0": 0,
"community|arabic_mmlu_ht:machine_learning|0": 0,
"community|arabic_mmlu_ht:management|0": 0,
"community|arabic_mmlu_ht:marketing|0": 0,
"community|arabic_mmlu_ht:medical_genetics|0": 0,
"community|arabic_mmlu_ht:miscellaneous|0": 0,
"community|arabic_mmlu_ht:moral_disputes|0": 0,
"community|arabic_mmlu_ht:moral_scenarios|0": 0,
"community|arabic_mmlu_ht:nutrition|0": 0,
"community|arabic_mmlu_ht:philosophy|0": 0,
"community|arabic_mmlu_ht:prehistory|0": 0,
"community|arabic_mmlu_ht:professional_accounting|0": 0,
"community|arabic_mmlu_ht:professional_law|0": 0,
"community|arabic_mmlu_ht:professional_medicine|0": 0,
"community|arabic_mmlu_ht:professional_psychology|0": 0,
"community|arabic_mmlu_ht:public_relations|0": 0,
"community|arabic_mmlu_ht:security_studies|0": 0,
"community|arabic_mmlu_ht:sociology|0": 0,
"community|arabic_mmlu_ht:us_foreign_policy|0": 0,
"community|arabic_mmlu_ht:virology|0": 0,
"community|arabic_mmlu_ht:world_religions|0": 0,
"community|aratrust:Ethics|0": 0,
"community|aratrust:Illegal|0": 0,
"community|aratrust:MentalHealth|0": 0,
"community|aratrust:Offensive|0": 0,
"community|aratrust:PhysicalHealth|0": 0,
"community|aratrust:Privacy|0": 0,
"community|aratrust:Trustfulness|0": 0,
"community|aratrust:Unfairness|0": 0,
"community|madinah_qa:Arabic Language (General)|0": 0,
"community|madinah_qa:Arabic Language (Grammar)|0": 0,
"community|alrage_qa|0": 0
},
"config_tasks": {
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams_pfn",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Accounting (University)": {
"name": "arabic_mmlu:Accounting (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Accounting (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 74,
"effective_num_docs": 74,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (General)": {
"name": "arabic_mmlu:Arabic Language (General)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)": {
"name": "arabic_mmlu:Arabic Language (Grammar)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (High School)": {
"name": "arabic_mmlu:Arabic Language (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)": {
"name": "arabic_mmlu:Arabic Language (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)": {
"name": "arabic_mmlu:Arabic Language (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 252,
"effective_num_docs": 252,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Biology (High School)": {
"name": "arabic_mmlu:Biology (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Biology (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1409,
"effective_num_docs": 1409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (High School)": {
"name": "arabic_mmlu:Civics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (Middle School)": {
"name": "arabic_mmlu:Civics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 236,
"effective_num_docs": 236,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (High School)": {
"name": "arabic_mmlu:Computer Science (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 261,
"effective_num_docs": 261,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Middle School)": {
"name": "arabic_mmlu:Computer Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Primary School)": {
"name": "arabic_mmlu:Computer Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 190,
"effective_num_docs": 190,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (University)": {
"name": "arabic_mmlu:Computer Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 64,
"effective_num_docs": 64,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Driving Test": {
"name": "arabic_mmlu:Driving Test",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Driving Test",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1211,
"effective_num_docs": 1211,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (High School)": {
"name": "arabic_mmlu:Economics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 360,
"effective_num_docs": 360,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (Middle School)": {
"name": "arabic_mmlu:Economics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (University)": {
"name": "arabic_mmlu:Economics (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 137,
"effective_num_docs": 137,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge": {
"name": "arabic_mmlu:General Knowledge",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 864,
"effective_num_docs": 864,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)": {
"name": "arabic_mmlu:General Knowledge (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 172,
"effective_num_docs": 172,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)": {
"name": "arabic_mmlu:General Knowledge (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 162,
"effective_num_docs": 162,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (High School)": {
"name": "arabic_mmlu:Geography (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1038,
"effective_num_docs": 1038,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Middle School)": {
"name": "arabic_mmlu:Geography (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Primary School)": {
"name": "arabic_mmlu:Geography (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (High School)": {
"name": "arabic_mmlu:History (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 760,
"effective_num_docs": 760,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Middle School)": {
"name": "arabic_mmlu:History (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Primary School)": {
"name": "arabic_mmlu:History (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies": {
"name": "arabic_mmlu:Islamic Studies",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 639,
"effective_num_docs": 639,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (High School)": {
"name": "arabic_mmlu:Islamic Studies (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 334,
"effective_num_docs": 334,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)": {
"name": "arabic_mmlu:Islamic Studies (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)": {
"name": "arabic_mmlu:Islamic Studies (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 999,
"effective_num_docs": 999,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Law (Professional)": {
"name": "arabic_mmlu:Law (Professional)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Law (Professional)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 314,
"effective_num_docs": 314,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Management (University)": {
"name": "arabic_mmlu:Management (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Management (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Math (Primary School)": {
"name": "arabic_mmlu:Math (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Math (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 409,
"effective_num_docs": 409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Middle School)": {
"name": "arabic_mmlu:Natural Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 242,
"effective_num_docs": 242,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Primary School)": {
"name": "arabic_mmlu:Natural Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 336,
"effective_num_docs": 336,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Philosophy (High School)": {
"name": "arabic_mmlu:Philosophy (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Philosophy (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 39,
"effective_num_docs": 39,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Physics (High School)": {
"name": "arabic_mmlu:Physics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Physics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Political Science (University)": {
"name": "arabic_mmlu:Political Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Political Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Middle School)": {
"name": "arabic_mmlu:Social Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 241,
"effective_num_docs": 241,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Primary School)": {
"name": "arabic_mmlu:Social Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 705,
"effective_num_docs": 705,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:abstract_algebra": {
"name": "arabic_mmlu_ht:abstract_algebra",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "abstract_algebra",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:anatomy": {
"name": "arabic_mmlu_ht:anatomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "anatomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:astronomy": {
"name": "arabic_mmlu_ht:astronomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "astronomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:business_ethics": {
"name": "arabic_mmlu_ht:business_ethics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "business_ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:clinical_knowledge": {
"name": "arabic_mmlu_ht:clinical_knowledge",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_biology": {
"name": "arabic_mmlu_ht:college_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_chemistry": {
"name": "arabic_mmlu_ht:college_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_computer_science": {
"name": "arabic_mmlu_ht:college_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_mathematics": {
"name": "arabic_mmlu_ht:college_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_medicine": {
"name": "arabic_mmlu_ht:college_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_physics": {
"name": "arabic_mmlu_ht:college_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:computer_security": {
"name": "arabic_mmlu_ht:computer_security",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "computer_security",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:conceptual_physics": {
"name": "arabic_mmlu_ht:conceptual_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "conceptual_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:econometrics": {
"name": "arabic_mmlu_ht:econometrics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "econometrics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:electrical_engineering": {
"name": "arabic_mmlu_ht:electrical_engineering",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "electrical_engineering",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:elementary_mathematics": {
"name": "arabic_mmlu_ht:elementary_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:formal_logic": {
"name": "arabic_mmlu_ht:formal_logic",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "formal_logic",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:global_facts": {
"name": "arabic_mmlu_ht:global_facts",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "global_facts",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_biology": {
"name": "arabic_mmlu_ht:high_school_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3813,
"effective_num_docs": 3813,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_chemistry": {
"name": "arabic_mmlu_ht:high_school_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4016,
"effective_num_docs": 4016,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_computer_science": {
"name": "arabic_mmlu_ht:high_school_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_european_history": {
"name": "arabic_mmlu_ht:high_school_european_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_european_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 8152,
"effective_num_docs": 8152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_geography": {
"name": "arabic_mmlu_ht:high_school_geography",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_geography",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics": {
"name": "arabic_mmlu_ht:high_school_government_and_politics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics": {
"name": "arabic_mmlu_ht:high_school_macroeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2891,
"effective_num_docs": 2891,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_mathematics": {
"name": "arabic_mmlu_ht:high_school_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics": {
"name": "arabic_mmlu_ht:high_school_microeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_physics": {
"name": "arabic_mmlu_ht:high_school_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_psychology": {
"name": "arabic_mmlu_ht:high_school_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_statistics": {
"name": "arabic_mmlu_ht:high_school_statistics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_statistics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4232,
"effective_num_docs": 4232,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_us_history": {
"name": "arabic_mmlu_ht:high_school_us_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_us_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_world_history": {
"name": "arabic_mmlu_ht:high_school_world_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_world_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_aging": {
"name": "arabic_mmlu_ht:human_aging",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_aging",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_sexuality": {
"name": "arabic_mmlu_ht:human_sexuality",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_sexuality",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:international_law": {
"name": "arabic_mmlu_ht:international_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "international_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:jurisprudence": {
"name": "arabic_mmlu_ht:jurisprudence",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "jurisprudence",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:logical_fallacies": {
"name": "arabic_mmlu_ht:logical_fallacies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "logical_fallacies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:machine_learning": {
"name": "arabic_mmlu_ht:machine_learning",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "machine_learning",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:management": {
"name": "arabic_mmlu_ht:management",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "management",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:marketing": {
"name": "arabic_mmlu_ht:marketing",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "marketing",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:medical_genetics": {
"name": "arabic_mmlu_ht:medical_genetics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "medical_genetics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:miscellaneous": {
"name": "arabic_mmlu_ht:miscellaneous",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "miscellaneous",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2420,
"effective_num_docs": 2420,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_disputes": {
"name": "arabic_mmlu_ht:moral_disputes",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_disputes",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_scenarios": {
"name": "arabic_mmlu_ht:moral_scenarios",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_scenarios",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:nutrition": {
"name": "arabic_mmlu_ht:nutrition",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "nutrition",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:philosophy": {
"name": "arabic_mmlu_ht:philosophy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "philosophy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:prehistory": {
"name": "arabic_mmlu_ht:prehistory",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "prehistory",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_accounting": {
"name": "arabic_mmlu_ht:professional_accounting",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_accounting",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4514,
"effective_num_docs": 4514,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_law": {
"name": "arabic_mmlu_ht:professional_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7987,
"effective_num_docs": 7987,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_medicine": {
"name": "arabic_mmlu_ht:professional_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1637,
"effective_num_docs": 1637,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_psychology": {
"name": "arabic_mmlu_ht:professional_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3503,
"effective_num_docs": 3503,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:public_relations": {
"name": "arabic_mmlu_ht:public_relations",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "public_relations",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:security_studies": {
"name": "arabic_mmlu_ht:security_studies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "security_studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:sociology": {
"name": "arabic_mmlu_ht:sociology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "sociology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:us_foreign_policy": {
"name": "arabic_mmlu_ht:us_foreign_policy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:virology": {
"name": "arabic_mmlu_ht:virology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "virology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:world_religions": {
"name": "arabic_mmlu_ht:world_religions",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "world_religions",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Ethics": {
"name": "aratrust:Ethics",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 60,
"effective_num_docs": 60,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Illegal": {
"name": "aratrust:Illegal",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Illegal",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 53,
"effective_num_docs": 53,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:MentalHealth": {
"name": "aratrust:MentalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "MentalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 76,
"effective_num_docs": 76,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Offensive": {
"name": "aratrust:Offensive",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Offensive",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 69,
"effective_num_docs": 69,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:PhysicalHealth": {
"name": "aratrust:PhysicalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "PhysicalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 73,
"effective_num_docs": 73,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Privacy": {
"name": "aratrust:Privacy",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Privacy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Trustfulness": {
"name": "aratrust:Trustfulness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Trustfulness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 78,
"effective_num_docs": 78,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Unfairness": {
"name": "aratrust:Unfairness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Unfairness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 55,
"effective_num_docs": 55,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (General)": {
"name": "madinah_qa:Arabic Language (General)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (Grammar)": {
"name": "madinah_qa:Arabic Language (Grammar)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alrage_qa": {
"name": "alrage_qa",
"prompt_function": "qa_prompt_arabic",
"hf_repo": "OALL/ALRAGE",
"hf_subset": null,
"metric": [
{
"metric_name": "llm_as_judge",
"higher_is_better": true,
"category": "7",
"use_case": "10",
"sample_level_fn": "_sample_level_fn",
"corpus_level_fn": "aggregate_scores"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 200,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2106,
"effective_num_docs": 2106,
"must_remove_duplicate_docs": false,
"version": 0
}
},
"summary_tasks": {
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "c07a5e78c5c0b8fe",
"hash_input_tokens": "dc4c7dbe99a8f0ba",
"hash_cont_tokens": "ad50ef886e4badb6"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "c0b6081f83e14064",
"hash_input_tokens": "d92dd364448a4767",
"hash_cont_tokens": "11856103e53a468a"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "64eb78a7c5b7484b",
"hash_input_tokens": "dbf7e06e25c8ba6e",
"hash_cont_tokens": "641fc8b57ef21d8e"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "54fc3502c1c02c06",
"hash_input_tokens": "e6bd5d76a64fa9b5",
"hash_cont_tokens": "70c2b257905a8421"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "46572d83696552ae",
"hash_input_tokens": "99577b964baa14bf",
"hash_cont_tokens": "be76e78732ef0009"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "f430d97ff715bc1c",
"hash_input_tokens": "989f0efc00ea0be8",
"hash_cont_tokens": "da5edd63a89b1138"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "6b70a7416584f98c",
"hash_input_tokens": "b7d435df48a3233a",
"hash_cont_tokens": "9fbeea076496099e"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15633,
"non_padded": 357,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "bc2005cc9d2f436e",
"hash_input_tokens": "cf8d0593905fe5ca",
"hash_cont_tokens": "796d63ec481ad55a"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17201,
"non_padded": 784,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "6fb0e254ea5945d8",
"hash_input_tokens": "6889045e7fe0a97d",
"hash_cont_tokens": "2e9ccdab90f54566"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5031,
"non_padded": 129,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "6d721df351722656",
"hash_input_tokens": "0d613555025e9db0",
"hash_cont_tokens": "cd403399a6437d4c"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2084,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Accounting (University)|0": {
"hashes": {
"hash_examples": "30e09697562ff9e7",
"hash_full_prompts": "30e09697562ff9e7",
"hash_input_tokens": "bea85907306d7ca6",
"hash_cont_tokens": "b9fdce757fa0e0b5"
},
"truncated": 0,
"non_truncated": 74,
"padded": 252,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "bef69fb8b3b75f28",
"hash_input_tokens": "dd8ae6f2d8499d92",
"hash_cont_tokens": "0ad97638daccd837"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2393,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "bd066a9e6a140a4b",
"hash_input_tokens": "6c5df3aa3d704a16",
"hash_cont_tokens": "b2b42393d50c7a40"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1572,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"hashes": {
"hash_examples": "a9c2cd9a9929292a",
"hash_full_prompts": "a9c2cd9a9929292a",
"hash_input_tokens": "37915f48499a5aa1",
"hash_cont_tokens": "c7952627e22bd429"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1477,
"non_padded": 48,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"hashes": {
"hash_examples": "2f8a77bbbd0e21ff",
"hash_full_prompts": "2f8a77bbbd0e21ff",
"hash_input_tokens": "1765ae27bfdd56e4",
"hash_cont_tokens": "e0675947a1769915"
},
"truncated": 0,
"non_truncated": 27,
"padded": 105,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"hashes": {
"hash_examples": "5eed3da47822539b",
"hash_full_prompts": "5eed3da47822539b",
"hash_input_tokens": "62c2f21e8c7f1a31",
"hash_cont_tokens": "532da4ef100771b5"
},
"truncated": 0,
"non_truncated": 252,
"padded": 912,
"non_padded": 14,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Biology (High School)|0": {
"hashes": {
"hash_examples": "91ae6d22a0f0213d",
"hash_full_prompts": "91ae6d22a0f0213d",
"hash_input_tokens": "767657fe6c71aa1c",
"hash_cont_tokens": "5c1b9d95d3ef90f1"
},
"truncated": 0,
"non_truncated": 1409,
"padded": 4932,
"non_padded": 124,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (High School)|0": {
"hashes": {
"hash_examples": "f27bf8791bea2bb9",
"hash_full_prompts": "f27bf8791bea2bb9",
"hash_input_tokens": "92d6ee73dfa3f8d5",
"hash_cont_tokens": "af4dfe07e12b339c"
},
"truncated": 0,
"non_truncated": 87,
"padded": 312,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"hashes": {
"hash_examples": "74f5bb0098c8916f",
"hash_full_prompts": "74f5bb0098c8916f",
"hash_input_tokens": "b7b952cd83bae24d",
"hash_cont_tokens": "a8abefb3f54002fa"
},
"truncated": 0,
"non_truncated": 236,
"padded": 912,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"hashes": {
"hash_examples": "a4278d7b525d46fe",
"hash_full_prompts": "a4278d7b525d46fe",
"hash_input_tokens": "e796ee7f93162553",
"hash_cont_tokens": "305fa7f1176c0a96"
},
"truncated": 0,
"non_truncated": 261,
"padded": 966,
"non_padded": 40,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"hashes": {
"hash_examples": "0cb6c07e4b80dfd4",
"hash_full_prompts": "0cb6c07e4b80dfd4",
"hash_input_tokens": "db5af6ce2a8e2736",
"hash_cont_tokens": "7057adc8d70acae8"
},
"truncated": 0,
"non_truncated": 27,
"padded": 92,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"hashes": {
"hash_examples": "d96fc1bc32473533",
"hash_full_prompts": "d96fc1bc32473533",
"hash_input_tokens": "e4f6c076aa5cd97c",
"hash_cont_tokens": "f243dc70d7a662b6"
},
"truncated": 0,
"non_truncated": 190,
"padded": 476,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (University)|0": {
"hashes": {
"hash_examples": "8835587e436cbaff",
"hash_full_prompts": "8835587e436cbaff",
"hash_input_tokens": "659eb9beeb5c2487",
"hash_cont_tokens": "622c2a9754ec4992"
},
"truncated": 0,
"non_truncated": 64,
"padded": 251,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Driving Test|0": {
"hashes": {
"hash_examples": "7a4c38a2c451d075",
"hash_full_prompts": "7a4c38a2c451d075",
"hash_input_tokens": "3a26c453f673a97f",
"hash_cont_tokens": "fd342bd446c9f657"
},
"truncated": 0,
"non_truncated": 1211,
"padded": 3548,
"non_padded": 137,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (High School)|0": {
"hashes": {
"hash_examples": "c04c252836601279",
"hash_full_prompts": "c04c252836601279",
"hash_input_tokens": "becba93464d2f2e4",
"hash_cont_tokens": "11b1f2af01d8f664"
},
"truncated": 0,
"non_truncated": 360,
"padded": 1350,
"non_padded": 40,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"hashes": {
"hash_examples": "18fba1579406b3cc",
"hash_full_prompts": "18fba1579406b3cc",
"hash_input_tokens": "1902cbb716ba2a11",
"hash_cont_tokens": "a1c0249e1d783d40"
},
"truncated": 0,
"non_truncated": 87,
"padded": 332,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (University)|0": {
"hashes": {
"hash_examples": "7c9e86fba8151562",
"hash_full_prompts": "7c9e86fba8151562",
"hash_input_tokens": "68631263edf80a23",
"hash_cont_tokens": "43716155729a2357"
},
"truncated": 0,
"non_truncated": 137,
"padded": 536,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge|0": {
"hashes": {
"hash_examples": "acfbe4e1f0314b85",
"hash_full_prompts": "acfbe4e1f0314b85",
"hash_input_tokens": "ef3b969cfe5f8c4c",
"hash_cont_tokens": "0515b7d769710412"
},
"truncated": 0,
"non_truncated": 864,
"padded": 3112,
"non_padded": 101,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"hashes": {
"hash_examples": "03cd0ecf10224316",
"hash_full_prompts": "03cd0ecf10224316",
"hash_input_tokens": "6f822ab6e96e8397",
"hash_cont_tokens": "3b575fe4c129b755"
},
"truncated": 0,
"non_truncated": 172,
"padded": 607,
"non_padded": 21,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"hashes": {
"hash_examples": "c3ee30196e05e122",
"hash_full_prompts": "c3ee30196e05e122",
"hash_input_tokens": "a60d5c3fc9120ca2",
"hash_cont_tokens": "39d764dbe032cf64"
},
"truncated": 0,
"non_truncated": 162,
"padded": 609,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (High School)|0": {
"hashes": {
"hash_examples": "e2e329d2bdd9fb7b",
"hash_full_prompts": "e2e329d2bdd9fb7b",
"hash_input_tokens": "7be69a5255f1fa89",
"hash_cont_tokens": "81fc08ebe2325b45"
},
"truncated": 0,
"non_truncated": 1038,
"padded": 3960,
"non_padded": 156,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"hashes": {
"hash_examples": "420b161444291989",
"hash_full_prompts": "420b161444291989",
"hash_input_tokens": "8aaa0d472f6f31b2",
"hash_cont_tokens": "1d8526a8e3525ac7"
},
"truncated": 0,
"non_truncated": 272,
"padded": 936,
"non_padded": 39,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"hashes": {
"hash_examples": "5bc5ca48a4210899",
"hash_full_prompts": "5bc5ca48a4210899",
"hash_input_tokens": "5ec60cd717a7f991",
"hash_cont_tokens": "e29f9ba389dbecad"
},
"truncated": 0,
"non_truncated": 57,
"padded": 216,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (High School)|0": {
"hashes": {
"hash_examples": "c7cc37f29311bea1",
"hash_full_prompts": "c7cc37f29311bea1",
"hash_input_tokens": "e41ecb3afbe28485",
"hash_cont_tokens": "5cc4ad52f8bbceda"
},
"truncated": 0,
"non_truncated": 760,
"padded": 2826,
"non_padded": 136,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Middle School)|0": {
"hashes": {
"hash_examples": "5b9f1973337153a2",
"hash_full_prompts": "5b9f1973337153a2",
"hash_input_tokens": "4641caeaa1752099",
"hash_cont_tokens": "db8951a6b39b43cb"
},
"truncated": 0,
"non_truncated": 203,
"padded": 722,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Primary School)|0": {
"hashes": {
"hash_examples": "af2469847007c1fe",
"hash_full_prompts": "af2469847007c1fe",
"hash_input_tokens": "f8f6f9853a7da814",
"hash_cont_tokens": "a5ea7c8630fc7498"
},
"truncated": 0,
"non_truncated": 102,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies|0": {
"hashes": {
"hash_examples": "c8da9b2f16a5ea0f",
"hash_full_prompts": "c8da9b2f16a5ea0f",
"hash_input_tokens": "1755b255f6954d72",
"hash_cont_tokens": "c3cb3f831a3a523c"
},
"truncated": 0,
"non_truncated": 639,
"padded": 2469,
"non_padded": 60,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"hashes": {
"hash_examples": "efb11bc8ef398117",
"hash_full_prompts": "efb11bc8ef398117",
"hash_input_tokens": "882634fc2ad7526b",
"hash_cont_tokens": "c943e3ef46640a33"
},
"truncated": 0,
"non_truncated": 334,
"padded": 1257,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"hashes": {
"hash_examples": "9e33ab030eebdb99",
"hash_full_prompts": "9e33ab030eebdb99",
"hash_input_tokens": "dfa959f6620c738f",
"hash_cont_tokens": "04d3cf3eb50b6a87"
},
"truncated": 0,
"non_truncated": 238,
"padded": 851,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"hashes": {
"hash_examples": "4167565d878b20eb",
"hash_full_prompts": "4167565d878b20eb",
"hash_input_tokens": "d61d048d6ab1c868",
"hash_cont_tokens": "7abd556988f36623"
},
"truncated": 0,
"non_truncated": 999,
"padded": 2979,
"non_padded": 45,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Law (Professional)|0": {
"hashes": {
"hash_examples": "e77f52c8fe4352b3",
"hash_full_prompts": "e77f52c8fe4352b3",
"hash_input_tokens": "681367ab669f0814",
"hash_cont_tokens": "3a938176e614a705"
},
"truncated": 0,
"non_truncated": 314,
"padded": 1210,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Management (University)|0": {
"hashes": {
"hash_examples": "09682649b04b7327",
"hash_full_prompts": "09682649b04b7327",
"hash_input_tokens": "83309624d160f5f2",
"hash_cont_tokens": "4355754fb9b395f1"
},
"truncated": 0,
"non_truncated": 75,
"padded": 196,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Math (Primary School)|0": {
"hashes": {
"hash_examples": "edb027bfae7e76f1",
"hash_full_prompts": "edb027bfae7e76f1",
"hash_input_tokens": "7320bdbbd2d71c98",
"hash_cont_tokens": "d17e44f35c7daef9"
},
"truncated": 0,
"non_truncated": 409,
"padded": 1273,
"non_padded": 23,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"hashes": {
"hash_examples": "96e72c9094c2364c",
"hash_full_prompts": "96e72c9094c2364c",
"hash_input_tokens": "36e5251118eb863d",
"hash_cont_tokens": "6395fa16036a74a6"
},
"truncated": 0,
"non_truncated": 242,
"padded": 900,
"non_padded": 40,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"hashes": {
"hash_examples": "69e35bad3dec5a4d",
"hash_full_prompts": "69e35bad3dec5a4d",
"hash_input_tokens": "300a567224c12076",
"hash_cont_tokens": "9947a0f508d1b991"
},
"truncated": 0,
"non_truncated": 336,
"padded": 1216,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"hashes": {
"hash_examples": "dc6ebd484a02fca5",
"hash_full_prompts": "dc6ebd484a02fca5",
"hash_input_tokens": "dba218cf25b3eafd",
"hash_cont_tokens": "713a5834636f4b0a"
},
"truncated": 0,
"non_truncated": 39,
"padded": 156,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Physics (High School)|0": {
"hashes": {
"hash_examples": "58a1722472c9e644",
"hash_full_prompts": "58a1722472c9e644",
"hash_input_tokens": "304e54edfd6f0f58",
"hash_cont_tokens": "dfc88c66832d89f6"
},
"truncated": 0,
"non_truncated": 255,
"padded": 1000,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Political Science (University)|0": {
"hashes": {
"hash_examples": "07a4ed6aabbdfd1e",
"hash_full_prompts": "07a4ed6aabbdfd1e",
"hash_input_tokens": "efd46eb80580299d",
"hash_cont_tokens": "fa92540465daf162"
},
"truncated": 0,
"non_truncated": 210,
"padded": 693,
"non_padded": 17,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"hashes": {
"hash_examples": "8ca955902f304664",
"hash_full_prompts": "8ca955902f304664",
"hash_input_tokens": "98f480ce0d958cdc",
"hash_cont_tokens": "669ee413663be3c9"
},
"truncated": 0,
"non_truncated": 241,
"padded": 889,
"non_padded": 40,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"hashes": {
"hash_examples": "934025ab3738123c",
"hash_full_prompts": "934025ab3738123c",
"hash_input_tokens": "f6e672f80b72f021",
"hash_cont_tokens": "ddc6ab0a1f14ad8d"
},
"truncated": 0,
"non_truncated": 705,
"padded": 2019,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"hashes": {
"hash_examples": "0b557911f2f6d919",
"hash_full_prompts": "0b557911f2f6d919",
"hash_input_tokens": "2b530b9298624e0d",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:anatomy|0": {
"hashes": {
"hash_examples": "a552d8a0ef294061",
"hash_full_prompts": "a552d8a0ef294061",
"hash_input_tokens": "e5cb9a42f9eb3d46",
"hash_cont_tokens": "4122c22f70611a71"
},
"truncated": 0,
"non_truncated": 135,
"padded": 524,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:astronomy|0": {
"hashes": {
"hash_examples": "c4a372d0af7da098",
"hash_full_prompts": "c4a372d0af7da098",
"hash_input_tokens": "ecf7c4ae0955c1b4",
"hash_cont_tokens": "9e115bbb4624dab2"
},
"truncated": 0,
"non_truncated": 152,
"padded": 608,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:business_ethics|0": {
"hashes": {
"hash_examples": "9f71d816abf8af7a",
"hash_full_prompts": "9f71d816abf8af7a",
"hash_input_tokens": "44eb6b37241c76c7",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"hashes": {
"hash_examples": "38303cd765589ef3",
"hash_full_prompts": "38303cd765589ef3",
"hash_input_tokens": "cc2d11ce76f97105",
"hash_cont_tokens": "dfb057c876d7b7d1"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1044,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_biology|0": {
"hashes": {
"hash_examples": "dbd9b5d318e60b04",
"hash_full_prompts": "dbd9b5d318e60b04",
"hash_input_tokens": "4f832db633bf98c3",
"hash_cont_tokens": "71fe5c602815d71c"
},
"truncated": 0,
"non_truncated": 144,
"padded": 564,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"hashes": {
"hash_examples": "6f88491d03db8a4c",
"hash_full_prompts": "6f88491d03db8a4c",
"hash_input_tokens": "2b490813486b7bae",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 388,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"hashes": {
"hash_examples": "ebfdee5ef2ed5e17",
"hash_full_prompts": "ebfdee5ef2ed5e17",
"hash_input_tokens": "a4149e8b57eb9d4d",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"hashes": {
"hash_examples": "e3f22cd7712aae2f",
"hash_full_prompts": "e3f22cd7712aae2f",
"hash_input_tokens": "bd4340372ca065b8",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 392,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_medicine|0": {
"hashes": {
"hash_examples": "51a5501373afb5a7",
"hash_full_prompts": "51a5501373afb5a7",
"hash_input_tokens": "68e56ebb78f37a19",
"hash_cont_tokens": "67dc34623443a1e7"
},
"truncated": 0,
"non_truncated": 173,
"padded": 684,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_physics|0": {
"hashes": {
"hash_examples": "2d3e015989b108db",
"hash_full_prompts": "2d3e015989b108db",
"hash_input_tokens": "3de530890d504f83",
"hash_cont_tokens": "c3d82396534571b1"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:computer_security|0": {
"hashes": {
"hash_examples": "f8810eddc38dfee4",
"hash_full_prompts": "f8810eddc38dfee4",
"hash_input_tokens": "0a10af04924b6443",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"hashes": {
"hash_examples": "211e32cc43c6b1dc",
"hash_full_prompts": "211e32cc43c6b1dc",
"hash_input_tokens": "80b94330f5834729",
"hash_cont_tokens": "528cfb1d10170a06"
},
"truncated": 0,
"non_truncated": 235,
"padded": 908,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:econometrics|0": {
"hashes": {
"hash_examples": "810023786b2484d2",
"hash_full_prompts": "810023786b2484d2",
"hash_input_tokens": "ee5839d6b1bc2c70",
"hash_cont_tokens": "a24a775906af93a7"
},
"truncated": 0,
"non_truncated": 114,
"padded": 456,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"hashes": {
"hash_examples": "a222760c93eaa1ee",
"hash_full_prompts": "a222760c93eaa1ee",
"hash_input_tokens": "9d01afe8249ee3a1",
"hash_cont_tokens": "67859ce4caabcbf6"
},
"truncated": 0,
"non_truncated": 145,
"padded": 552,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"hashes": {
"hash_examples": "4c069aeee64dc227",
"hash_full_prompts": "4c069aeee64dc227",
"hash_input_tokens": "0c5c8a3ff337940e",
"hash_cont_tokens": "4db107de29530e88"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1476,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:formal_logic|0": {
"hashes": {
"hash_examples": "3cb0ccbf8e8a77ae",
"hash_full_prompts": "3cb0ccbf8e8a77ae",
"hash_input_tokens": "ee90091228927cf9",
"hash_cont_tokens": "7e8a9a604a690f7d"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:global_facts|0": {
"hashes": {
"hash_examples": "c1d039e64ea321b9",
"hash_full_prompts": "c1d039e64ea321b9",
"hash_input_tokens": "71a587a1b55ec538",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"hashes": {
"hash_examples": "ddcb8237bb4ba08a",
"hash_full_prompts": "ddcb8237bb4ba08a",
"hash_input_tokens": "fa78396da5d13b5b",
"hash_cont_tokens": "0efe1e830248e6bd"
},
"truncated": 0,
"non_truncated": 3813,
"padded": 14904,
"non_padded": 348,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"hashes": {
"hash_examples": "07061b55c5c436d9",
"hash_full_prompts": "07061b55c5c436d9",
"hash_input_tokens": "117af459d8ec3e05",
"hash_cont_tokens": "7d8f7102fdcdee06"
},
"truncated": 0,
"non_truncated": 4016,
"padded": 15700,
"non_padded": 364,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"hashes": {
"hash_examples": "8d3405483d5fdcff",
"hash_full_prompts": "8d3405483d5fdcff",
"hash_input_tokens": "504338b75cdf4c3b",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"hashes": {
"hash_examples": "031c49a430356414",
"hash_full_prompts": "031c49a430356414",
"hash_input_tokens": "5d62415e4f700665",
"hash_cont_tokens": "7065798e4be561c9"
},
"truncated": 0,
"non_truncated": 8152,
"padded": 32236,
"non_padded": 372,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"hashes": {
"hash_examples": "d0ce2b019a66c1de",
"hash_full_prompts": "d0ce2b019a66c1de",
"hash_input_tokens": "b5bee9de37f7654b",
"hash_cont_tokens": "7dcb2b397f2899dc"
},
"truncated": 0,
"non_truncated": 198,
"padded": 772,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "7d7c6d476d0576b1",
"hash_full_prompts": "7d7c6d476d0576b1",
"hash_input_tokens": "140d7f3db8348d25",
"hash_cont_tokens": "1e5c1142db714c00"
},
"truncated": 0,
"non_truncated": 193,
"padded": 768,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "694d3a01c6144ddb",
"hash_full_prompts": "694d3a01c6144ddb",
"hash_input_tokens": "05212801185fd434",
"hash_cont_tokens": "52ef1df59ddc4b6e"
},
"truncated": 0,
"non_truncated": 2891,
"padded": 11268,
"non_padded": 296,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"hashes": {
"hash_examples": "004f9c0a40b5ec10",
"hash_full_prompts": "004f9c0a40b5ec10",
"hash_input_tokens": "fd16e320268e03bb",
"hash_cont_tokens": "8e39aba941948a67"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1072,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "80cf03d462e6ccbc",
"hash_full_prompts": "80cf03d462e6ccbc",
"hash_input_tokens": "4273fc10342c34bb",
"hash_cont_tokens": "fb290d2239a20213"
},
"truncated": 0,
"non_truncated": 238,
"padded": 948,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"hashes": {
"hash_examples": "92218def5b383845",
"hash_full_prompts": "92218def5b383845",
"hash_input_tokens": "631eab03b1e51918",
"hash_cont_tokens": "85a8c5003a5c3167"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"hashes": {
"hash_examples": "323f7848fee32e58",
"hash_full_prompts": "323f7848fee32e58",
"hash_input_tokens": "e416d2f70bbbf03d",
"hash_cont_tokens": "879a27397842c6ca"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2128,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"hashes": {
"hash_examples": "d7bbe0d037cf31ec",
"hash_full_prompts": "d7bbe0d037cf31ec",
"hash_input_tokens": "18a4db6fed450646",
"hash_cont_tokens": "10dc12ad013a60b0"
},
"truncated": 0,
"non_truncated": 4232,
"padded": 16564,
"non_padded": 364,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"hashes": {
"hash_examples": "722ec9207e3b0e04",
"hash_full_prompts": "722ec9207e3b0e04",
"hash_input_tokens": "06c188adee48488a",
"hash_cont_tokens": "b1b9db54238dfbb8"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"hashes": {
"hash_examples": "b5eb675d3b578584",
"hash_full_prompts": "b5eb675d3b578584",
"hash_input_tokens": "bfe8192e330f4d3b",
"hash_cont_tokens": "9ccd5f151ae77cab"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_aging|0": {
"hashes": {
"hash_examples": "713ac79cd2dd2d7b",
"hash_full_prompts": "713ac79cd2dd2d7b",
"hash_input_tokens": "e138d539ad0aeadd",
"hash_cont_tokens": "a2becbf3e3e77f4f"
},
"truncated": 0,
"non_truncated": 223,
"padded": 848,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"hashes": {
"hash_examples": "47551ab4e5dcf6c5",
"hash_full_prompts": "47551ab4e5dcf6c5",
"hash_input_tokens": "16917f5188323f45",
"hash_cont_tokens": "0fa1ff2729277dca"
},
"truncated": 0,
"non_truncated": 131,
"padded": 504,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:international_law|0": {
"hashes": {
"hash_examples": "da360336943398d5",
"hash_full_prompts": "da360336943398d5",
"hash_input_tokens": "a834bd7cb9fb84b0",
"hash_cont_tokens": "58257f147c7dcefa"
},
"truncated": 0,
"non_truncated": 121,
"padded": 480,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"hashes": {
"hash_examples": "661d161a486fb035",
"hash_full_prompts": "661d161a486fb035",
"hash_input_tokens": "273f7f10975999a5",
"hash_cont_tokens": "693ea75cc37656ed"
},
"truncated": 0,
"non_truncated": 108,
"padded": 424,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"hashes": {
"hash_examples": "5c3926384758bda7",
"hash_full_prompts": "5c3926384758bda7",
"hash_input_tokens": "8980627e551ac6b7",
"hash_cont_tokens": "239166980021cf9f"
},
"truncated": 0,
"non_truncated": 163,
"padded": 644,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:machine_learning|0": {
"hashes": {
"hash_examples": "3ce756e6a22ffc48",
"hash_full_prompts": "3ce756e6a22ffc48",
"hash_input_tokens": "3068ec1db6841f93",
"hash_cont_tokens": "6bce033cb0089aa1"
},
"truncated": 0,
"non_truncated": 112,
"padded": 444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:management|0": {
"hashes": {
"hash_examples": "20fe769bb3276832",
"hash_full_prompts": "20fe769bb3276832",
"hash_input_tokens": "f3d9a1c2e430a776",
"hash_cont_tokens": "e7399dc664d5411e"
},
"truncated": 0,
"non_truncated": 103,
"padded": 392,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:marketing|0": {
"hashes": {
"hash_examples": "6b19449559d987ce",
"hash_full_prompts": "6b19449559d987ce",
"hash_input_tokens": "8af701091ddbb2ea",
"hash_cont_tokens": "edca4400522e2a1c"
},
"truncated": 0,
"non_truncated": 234,
"padded": 916,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"hashes": {
"hash_examples": "cbb0fa9df0f5435a",
"hash_full_prompts": "cbb0fa9df0f5435a",
"hash_input_tokens": "ebe777a5e07f8e21",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"hashes": {
"hash_examples": "0a4134046c23cff9",
"hash_full_prompts": "0a4134046c23cff9",
"hash_input_tokens": "2854447dcb23b4cb",
"hash_cont_tokens": "d981b1fa928a1f6d"
},
"truncated": 0,
"non_truncated": 2420,
"padded": 9396,
"non_padded": 284,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"hashes": {
"hash_examples": "1ac8a0967c82caa0",
"hash_full_prompts": "1ac8a0967c82caa0",
"hash_input_tokens": "fdb38931eaad229b",
"hash_cont_tokens": "56f6132ef0181ee7"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1376,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"hashes": {
"hash_examples": "2c0670188bc5a789",
"hash_full_prompts": "2c0670188bc5a789",
"hash_input_tokens": "c4f8e453da718f17",
"hash_cont_tokens": "4f0f1c52461a0fbb"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:nutrition|0": {
"hashes": {
"hash_examples": "658628c0dcdfe201",
"hash_full_prompts": "658628c0dcdfe201",
"hash_input_tokens": "0ec88e8a40755b54",
"hash_cont_tokens": "7481b847aa476714"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1212,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:philosophy|0": {
"hashes": {
"hash_examples": "8b6707b322affafd",
"hash_full_prompts": "8b6707b322affafd",
"hash_input_tokens": "f286157dab29855b",
"hash_cont_tokens": "6dca32c173424c87"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1216,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:prehistory|0": {
"hashes": {
"hash_examples": "0c85ffcdc9a7b367",
"hash_full_prompts": "0c85ffcdc9a7b367",
"hash_input_tokens": "f2238f6948c15b99",
"hash_cont_tokens": "30fed1f015e456bc"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1278,
"non_padded": 18,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"hashes": {
"hash_examples": "cce1ea2d5f544b2f",
"hash_full_prompts": "cce1ea2d5f544b2f",
"hash_input_tokens": "868e4cbaceadb85e",
"hash_cont_tokens": "e061e57ab7ea7c3e"
},
"truncated": 0,
"non_truncated": 4514,
"padded": 17516,
"non_padded": 540,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_law|0": {
"hashes": {
"hash_examples": "1c654b024b54eb4b",
"hash_full_prompts": "1c654b024b54eb4b",
"hash_input_tokens": "5b55479a2f79e598",
"hash_cont_tokens": "9b17d1196f37f64e"
},
"truncated": 0,
"non_truncated": 7987,
"padded": 31341,
"non_padded": 607,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"hashes": {
"hash_examples": "c621eaacfa662ebc",
"hash_full_prompts": "c621eaacfa662ebc",
"hash_input_tokens": "eaa063b5021ce224",
"hash_cont_tokens": "b1f3abcac825e9c1"
},
"truncated": 0,
"non_truncated": 1637,
"padded": 6320,
"non_padded": 228,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"hashes": {
"hash_examples": "bc14a28eaec87dc4",
"hash_full_prompts": "bc14a28eaec87dc4",
"hash_input_tokens": "8b6a8e6ee8bccb8f",
"hash_cont_tokens": "751f0e72cd8b5d53"
},
"truncated": 0,
"non_truncated": 3503,
"padded": 13516,
"non_padded": 496,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:public_relations|0": {
"hashes": {
"hash_examples": "de4989d9375885c4",
"hash_full_prompts": "de4989d9375885c4",
"hash_input_tokens": "44e8ca418257cf1d",
"hash_cont_tokens": "d87ce21c2641acbc"
},
"truncated": 0,
"non_truncated": 110,
"padded": 416,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:security_studies|0": {
"hashes": {
"hash_examples": "3f84bfeec717c6de",
"hash_full_prompts": "3f84bfeec717c6de",
"hash_input_tokens": "f8ce4708bf5dfd6e",
"hash_cont_tokens": "c6fd8ead17c21369"
},
"truncated": 0,
"non_truncated": 245,
"padded": 976,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:sociology|0": {
"hashes": {
"hash_examples": "10d7c2fae10bfcbc",
"hash_full_prompts": "10d7c2fae10bfcbc",
"hash_input_tokens": "25b45b1b50840c8f",
"hash_cont_tokens": "b9119e5745ee6117"
},
"truncated": 0,
"non_truncated": 201,
"padded": 788,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"hashes": {
"hash_examples": "bb05f02c38ddaf1a",
"hash_full_prompts": "bb05f02c38ddaf1a",
"hash_input_tokens": "a89c37eb60ea8ff5",
"hash_cont_tokens": "3c119595d829a161"
},
"truncated": 0,
"non_truncated": 100,
"padded": 384,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:virology|0": {
"hashes": {
"hash_examples": "290915a48884ede2",
"hash_full_prompts": "290915a48884ede2",
"hash_input_tokens": "cb328992cb54047e",
"hash_cont_tokens": "75c09075014f1114"
},
"truncated": 0,
"non_truncated": 166,
"padded": 652,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:world_religions|0": {
"hashes": {
"hash_examples": "91cc5451c7284f75",
"hash_full_prompts": "91cc5451c7284f75",
"hash_input_tokens": "8cfcad3cad1e89d3",
"hash_cont_tokens": "ecb299c479136706"
},
"truncated": 0,
"non_truncated": 171,
"padded": 652,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "bef69fb8b3b75f28",
"hash_input_tokens": "88f7a10c592e54c2",
"hash_cont_tokens": "98e71c65630eb67e"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2331,
"non_padded": 72,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "bd066a9e6a140a4b",
"hash_input_tokens": "238ac9fbfa251e46",
"hash_cont_tokens": "2edbcdb6e1c31560"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1532,
"non_padded": 56,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Ethics|0": {
"hashes": {
"hash_examples": "5d32da36271c5eb4",
"hash_full_prompts": "5d32da36271c5eb4",
"hash_input_tokens": "162a7aecdae8a92b",
"hash_cont_tokens": "9403afe6be071fd3"
},
"truncated": 0,
"non_truncated": 60,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Illegal|0": {
"hashes": {
"hash_examples": "0c07f1f100f2d0e8",
"hash_full_prompts": "0c07f1f100f2d0e8",
"hash_input_tokens": "47a9fae978f1a839",
"hash_cont_tokens": "c4c1d582c3de428f"
},
"truncated": 0,
"non_truncated": 53,
"padded": 158,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:MentalHealth|0": {
"hashes": {
"hash_examples": "8e5fc5c4704bd96b",
"hash_full_prompts": "8e5fc5c4704bd96b",
"hash_input_tokens": "0cf3a17870ab9cc4",
"hash_cont_tokens": "ddece1b0a9316221"
},
"truncated": 0,
"non_truncated": 76,
"padded": 219,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Offensive|0": {
"hashes": {
"hash_examples": "5ad4369b7dc5de46",
"hash_full_prompts": "5ad4369b7dc5de46",
"hash_input_tokens": "2b6bfd5ef3c81f61",
"hash_cont_tokens": "a7133b9cc89b9225"
},
"truncated": 0,
"non_truncated": 69,
"padded": 202,
"non_padded": 5,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:PhysicalHealth|0": {
"hashes": {
"hash_examples": "dc2a632e2dcc86db",
"hash_full_prompts": "dc2a632e2dcc86db",
"hash_input_tokens": "7c89640e05366244",
"hash_cont_tokens": "638273625d8ef1dc"
},
"truncated": 0,
"non_truncated": 73,
"padded": 207,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Privacy|0": {
"hashes": {
"hash_examples": "295e35448a39e003",
"hash_full_prompts": "295e35448a39e003",
"hash_input_tokens": "11dad94d907420d4",
"hash_cont_tokens": "1dc5174807ee41ed"
},
"truncated": 0,
"non_truncated": 57,
"padded": 159,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Trustfulness|0": {
"hashes": {
"hash_examples": "e79ac1ea5439e623",
"hash_full_prompts": "e79ac1ea5439e623",
"hash_input_tokens": "b0b15c4cee546777",
"hash_cont_tokens": "cc05ef6e19e62e40"
},
"truncated": 0,
"non_truncated": 78,
"padded": 222,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Unfairness|0": {
"hashes": {
"hash_examples": "4ac5dccbfbdc5077",
"hash_full_prompts": "4ac5dccbfbdc5077",
"hash_input_tokens": "3bfab88d892f0f5d",
"hash_cont_tokens": "1cf99947d87c13f3"
},
"truncated": 0,
"non_truncated": 55,
"padded": 156,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alrage_qa|0": {
"hashes": {
"hash_examples": "3edbbe22cabd4160",
"hash_full_prompts": "3edbbe22cabd4160",
"hash_input_tokens": "55453ef00381efe3",
"hash_cont_tokens": "0ac3fd16b70a4fe4"
},
"truncated": 2106,
"non_truncated": 0,
"padded": 2106,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b8b3b49631adcc40",
"hash_full_prompts": "b8b3b49631adcc40",
"hash_input_tokens": "47fd62a40274f97f",
"hash_cont_tokens": "f5928bb0f06dfc2c"
},
"truncated": 84,
"non_truncated": 91778,
"padded": 329559,
"non_padded": 7325,
"num_truncated_few_shots": 0
}
}