Datasets:
OALL
/

Modalities:
Text
Formats:
json
Size:
< 1K
Libraries:
Datasets
Dask
v2_results / Qwen / Qwen1.5-1.8B / results_2025-01-19T08-22-08.351491.json
amztheory's picture
Upload Qwen/Qwen1.5-1.8B/results_2025-01-19T08-22-08.351491.json with huggingface_hub
6c6fa13 verified
raw
history blame
246 kB
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": 0,
"start_time": 704.982959585,
"end_time": 3092.531681457,
"total_evaluation_time_secondes": "2387.548721872",
"model_name": "Qwen/Qwen1.5-1.8B",
"model_sha": "7846de7ed421727b318d6605a0bfab659da2c067",
"model_dtype": "torch.bfloat16",
"model_size": "3.42 GB"
},
"results": {
"community|alghafa:mcq_exams_test_ar|0": {
"acc_norm": 0.236983842010772,
"acc_norm_stderr": 0.01803386605489648
},
"community|alghafa:meta_ar_dialects|0": {
"acc_norm": 0.24559777571825764,
"acc_norm_stderr": 0.0058608178451443425
},
"community|alghafa:meta_ar_msa|0": {
"acc_norm": 0.2536312849162011,
"acc_norm_stderr": 0.014551553659369918
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"acc_norm": 0.52,
"acc_norm_stderr": 0.05807730170189531
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"acc_norm": 0.38666666666666666,
"acc_norm_stderr": 0.039895463700310406
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"acc_norm": 0.24666666666666667,
"acc_norm_stderr": 0.03531471376356937
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"acc_norm": 0.5139462163852407,
"acc_norm_stderr": 0.00559009165619068
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"acc_norm": 0.3829858215179316,
"acc_norm_stderr": 0.006278856401453488
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"acc_norm": 0.4174418604651163,
"acc_norm_stderr": 0.011894048296224072
},
"community|arabic_exams|0": {
"acc_norm": 0.2383612662942272,
"acc_norm_stderr": 0.01840390396129297
},
"community|arabic_mmlu:Accounting (University)|0": {
"acc_norm": 0.33783783783783783,
"acc_norm_stderr": 0.05535729934952124
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"acc_norm": 0.24673202614379086,
"acc_norm_stderr": 0.0174408203674025
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"acc_norm": 0.22465753424657534,
"acc_norm_stderr": 0.02187542944965106
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"acc_norm": 0.2282051282051282,
"acc_norm_stderr": 0.02127839386358628
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.08594360757264022
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"acc_norm": 0.15873015873015872,
"acc_norm_stderr": 0.023065375254180547
},
"community|arabic_mmlu:Biology (High School)|0": {
"acc_norm": 0.30092264017033354,
"acc_norm_stderr": 0.012223307096115464
},
"community|arabic_mmlu:Civics (High School)|0": {
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.0481956028911523
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"acc_norm": 0.2457627118644068,
"acc_norm_stderr": 0.028085235109486757
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"acc_norm": 0.25287356321839083,
"acc_norm_stderr": 0.026956412412778324
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"acc_norm": 0.14814814814814814,
"acc_norm_stderr": 0.06966962541673782
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"acc_norm": 0.3736842105263158,
"acc_norm_stderr": 0.035189909668609055
},
"community|arabic_mmlu:Computer Science (University)|0": {
"acc_norm": 0.28125,
"acc_norm_stderr": 0.05664543544843536
},
"community|arabic_mmlu:Driving Test|0": {
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.012701119086920447
},
"community|arabic_mmlu:Economics (High School)|0": {
"acc_norm": 0.2833333333333333,
"acc_norm_stderr": 0.023782648315084427
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.05083285677753486
},
"community|arabic_mmlu:Economics (University)|0": {
"acc_norm": 0.24087591240875914,
"acc_norm_stderr": 0.0366676956748442
},
"community|arabic_mmlu:General Knowledge|0": {
"acc_norm": 0.2951388888888889,
"acc_norm_stderr": 0.015526000513709294
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"acc_norm": 0.3023255813953488,
"acc_norm_stderr": 0.0351209126342837
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"acc_norm": 0.24691358024691357,
"acc_norm_stderr": 0.033984584022153146
},
"community|arabic_mmlu:Geography (High School)|0": {
"acc_norm": 0.27842003853564545,
"acc_norm_stderr": 0.013918841793523946
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.028418208619406797
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.05752139056405265
},
"community|arabic_mmlu:History (High School)|0": {
"acc_norm": 0.2578947368421053,
"acc_norm_stderr": 0.01587936412273345
},
"community|arabic_mmlu:History (Middle School)|0": {
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.03178529710642751
},
"community|arabic_mmlu:History (Primary School)|0": {
"acc_norm": 0.20588235294117646,
"acc_norm_stderr": 0.04023382273617747
},
"community|arabic_mmlu:Islamic Studies|0": {
"acc_norm": 0.21752738654147105,
"acc_norm_stderr": 0.01633357325977571
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"acc_norm": 0.27844311377245506,
"acc_norm_stderr": 0.024563000213225904
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"acc_norm": 0.22268907563025211,
"acc_norm_stderr": 0.027025433498882385
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"acc_norm": 0.26926926926926925,
"acc_norm_stderr": 0.014041283601823384
},
"community|arabic_mmlu:Law (Professional)|0": {
"acc_norm": 0.13694267515923567,
"acc_norm_stderr": 0.019431980070367748
},
"community|arabic_mmlu:Management (University)|0": {
"acc_norm": 0.41333333333333333,
"acc_norm_stderr": 0.05724401171194134
},
"community|arabic_mmlu:Math (Primary School)|0": {
"acc_norm": 0.2885085574572127,
"acc_norm_stderr": 0.022430249715111962
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"acc_norm": 0.23140495867768596,
"acc_norm_stderr": 0.027166056421232602
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.02402179716619147
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"acc_norm": 0.28205128205128205,
"acc_norm_stderr": 0.07299934324587597
},
"community|arabic_mmlu:Physics (High School)|0": {
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.028589716279779468
},
"community|arabic_mmlu:Political Science (University)|0": {
"acc_norm": 0.2904761904761905,
"acc_norm_stderr": 0.031402600480698775
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"acc_norm": 0.24066390041493776,
"acc_norm_stderr": 0.027594140943021907
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"acc_norm": 0.3446808510638298,
"acc_norm_stderr": 0.017912189617534848
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"acc_norm": 0.23,
"acc_norm_stderr": 0.04229525846816507
},
"community|arabic_mmlu_ht:anatomy|0": {
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313142
},
"community|arabic_mmlu_ht:astronomy|0": {
"acc_norm": 0.19078947368421054,
"acc_norm_stderr": 0.03197565821032499
},
"community|arabic_mmlu_ht:business_ethics|0": {
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.02528839450289137
},
"community|arabic_mmlu_ht:college_biology|0": {
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"community|arabic_mmlu_ht:college_medicine|0": {
"acc_norm": 0.2138728323699422,
"acc_norm_stderr": 0.03126511206173044
},
"community|arabic_mmlu_ht:college_physics|0": {
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.041583075330832865
},
"community|arabic_mmlu_ht:computer_security|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"acc_norm": 0.2723404255319149,
"acc_norm_stderr": 0.029101290698386698
},
"community|arabic_mmlu_ht:econometrics|0": {
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.039994238792813365
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"acc_norm": 0.23448275862068965,
"acc_norm_stderr": 0.035306258743465914
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"acc_norm": 0.21957671957671956,
"acc_norm_stderr": 0.021320018599770355
},
"community|arabic_mmlu_ht:formal_logic|0": {
"acc_norm": 0.29365079365079366,
"acc_norm_stderr": 0.040735243221471276
},
"community|arabic_mmlu_ht:global_facts|0": {
"acc_norm": 0.17,
"acc_norm_stderr": 0.0377525168068637
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"acc_norm": 0.2192499344348282,
"acc_norm_stderr": 0.006701149466109796
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"acc_norm": 0.21613545816733068,
"acc_norm_stderr": 0.006495921162811217
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165065
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"acc_norm": 0.2295142296368989,
"acc_norm_stderr": 0.00465780896971598
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"acc_norm": 0.18181818181818182,
"acc_norm_stderr": 0.027479603010538787
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"acc_norm": 0.19170984455958548,
"acc_norm_stderr": 0.028408953626245296
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"acc_norm": 0.21687997232791423,
"acc_norm_stderr": 0.0076661086236366575
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"acc_norm": 0.21481481481481482,
"acc_norm_stderr": 0.025040443877000683
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"acc_norm": 0.226890756302521,
"acc_norm_stderr": 0.02720537153827947
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"acc_norm": 0.1986754966887417,
"acc_norm_stderr": 0.03257847384436776
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"acc_norm": 0.1963302752293578,
"acc_norm_stderr": 0.017030719339154357
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"acc_norm": 0.21313799621928167,
"acc_norm_stderr": 0.0062959056896953166
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604246
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"community|arabic_mmlu_ht:human_aging|0": {
"acc_norm": 0.3004484304932735,
"acc_norm_stderr": 0.03076935200822914
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"community|arabic_mmlu_ht:international_law|0": {
"acc_norm": 0.24793388429752067,
"acc_norm_stderr": 0.039418975265163025
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052191
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.032591773927421776
},
"community|arabic_mmlu_ht:machine_learning|0": {
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"community|arabic_mmlu_ht:management|0": {
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.03760178006026623
},
"community|arabic_mmlu_ht:marketing|0": {
"acc_norm": 0.3076923076923077,
"acc_norm_stderr": 0.030236389942173116
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"acc_norm": 0.21859504132231405,
"acc_norm_stderr": 0.008403114868439424
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.023267528432100174
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"acc_norm": 0.23687150837988827,
"acc_norm_stderr": 0.01421957078810399
},
"community|arabic_mmlu_ht:nutrition|0": {
"acc_norm": 0.22875816993464052,
"acc_norm_stderr": 0.024051029739912255
},
"community|arabic_mmlu_ht:philosophy|0": {
"acc_norm": 0.19614147909967847,
"acc_norm_stderr": 0.022552447780478026
},
"community|arabic_mmlu_ht:prehistory|0": {
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.022899162918445806
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"acc_norm": 0.21422241914045193,
"acc_norm_stderr": 0.006107305006852159
},
"community|arabic_mmlu_ht:professional_law|0": {
"acc_norm": 0.22987354450982847,
"acc_norm_stderr": 0.004708264630226396
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"acc_norm": 0.209529627367135,
"acc_norm_stderr": 0.010061758032677275
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"acc_norm": 0.22295175563802455,
"acc_norm_stderr": 0.007033498014244166
},
"community|arabic_mmlu_ht:public_relations|0": {
"acc_norm": 0.22727272727272727,
"acc_norm_stderr": 0.04013964554072775
},
"community|arabic_mmlu_ht:security_studies|0": {
"acc_norm": 0.18775510204081633,
"acc_norm_stderr": 0.025000256039546212
},
"community|arabic_mmlu_ht:sociology|0": {
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.03036049015401465
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"community|arabic_mmlu_ht:virology|0": {
"acc_norm": 0.2891566265060241,
"acc_norm_stderr": 0.03529486801511115
},
"community|arabic_mmlu_ht:world_religions|0": {
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.035650796707083106
},
"community|madinah_qa:Arabic Language (General)|0": {
"acc_norm": 0.2696078431372549,
"acc_norm_stderr": 0.017952449196987862
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"acc_norm": 0.29315068493150687,
"acc_norm_stderr": 0.02385932679013149
},
"community|aratrust:Ethics|0": {
"acc_norm": 0.4666666666666667,
"acc_norm_stderr": 0.06494964005966064
},
"community|aratrust:Illegal|0": {
"acc_norm": 0.3018867924528302,
"acc_norm_stderr": 0.06366244470090365
},
"community|aratrust:MentalHealth|0": {
"acc_norm": 0.3157894736842105,
"acc_norm_stderr": 0.05367388961676614
},
"community|aratrust:Offensive|0": {
"acc_norm": 0.4057971014492754,
"acc_norm_stderr": 0.05954802978721339
},
"community|aratrust:PhysicalHealth|0": {
"acc_norm": 0.2191780821917808,
"acc_norm_stderr": 0.048753781972745734
},
"community|aratrust:Privacy|0": {
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.061134390564663986
},
"community|aratrust:Trustfulness|0": {
"acc_norm": 0.2564102564102564,
"acc_norm_stderr": 0.0497609919747403
},
"community|aratrust:Unfairness|0": {
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.059278386873217015
},
"community|alghafa:_average|0": {
"acc_norm": 0.35599112603853916,
"acc_norm_stderr": 0.021721857008783783
},
"community|arabic_mmlu:_average|0": {
"acc_norm": 0.26677024335940114,
"acc_norm_stderr": 0.03272636430231531
},
"community|arabic_mmlu_ht:_average|0": {
"acc_norm": 0.23564779925494814,
"acc_norm_stderr": 0.02898512597049936
},
"community|madinah_qa:_average|0": {
"acc_norm": 0.28137926403438085,
"acc_norm_stderr": 0.020905887993559677
},
"community|aratrust:_average|0": {
"acc_norm": 0.3148149301794453,
"acc_norm_stderr": 0.057595194443738854
},
"all": {
"acc_norm": 0.25786948826254985,
"acc_norm_stderr": 0.029513019682922425,
"f1": 0.22260985532808425,
"f1_stderr": 0.050932696030003576
},
"community|alrage_qa|0": {
"llm_as_judge": 0.41889838556505204,
"llm_as_judge_stderr": 0.00023934858436313087
}
},
"versions": {
"community|alghafa:mcq_exams_test_ar|0": 0,
"community|alghafa:meta_ar_dialects|0": 0,
"community|alghafa:meta_ar_msa|0": 0,
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
"community|alghafa:multiple_choice_sentiment_task|0": 0,
"community|arabic_exams|0": 0,
"community|arabic_mmlu:Accounting (University)|0": 0,
"community|arabic_mmlu:Arabic Language (General)|0": 0,
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0,
"community|arabic_mmlu:Arabic Language (High School)|0": 0,
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0,
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0,
"community|arabic_mmlu:Biology (High School)|0": 0,
"community|arabic_mmlu:Civics (High School)|0": 0,
"community|arabic_mmlu:Civics (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (High School)|0": 0,
"community|arabic_mmlu:Computer Science (Middle School)|0": 0,
"community|arabic_mmlu:Computer Science (Primary School)|0": 0,
"community|arabic_mmlu:Computer Science (University)|0": 0,
"community|arabic_mmlu:Driving Test|0": 0,
"community|arabic_mmlu:Economics (High School)|0": 0,
"community|arabic_mmlu:Economics (Middle School)|0": 0,
"community|arabic_mmlu:Economics (University)|0": 0,
"community|arabic_mmlu:General Knowledge|0": 0,
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0,
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0,
"community|arabic_mmlu:Geography (High School)|0": 0,
"community|arabic_mmlu:Geography (Middle School)|0": 0,
"community|arabic_mmlu:Geography (Primary School)|0": 0,
"community|arabic_mmlu:History (High School)|0": 0,
"community|arabic_mmlu:History (Middle School)|0": 0,
"community|arabic_mmlu:History (Primary School)|0": 0,
"community|arabic_mmlu:Islamic Studies|0": 0,
"community|arabic_mmlu:Islamic Studies (High School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0,
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0,
"community|arabic_mmlu:Law (Professional)|0": 0,
"community|arabic_mmlu:Management (University)|0": 0,
"community|arabic_mmlu:Math (Primary School)|0": 0,
"community|arabic_mmlu:Natural Science (Middle School)|0": 0,
"community|arabic_mmlu:Natural Science (Primary School)|0": 0,
"community|arabic_mmlu:Philosophy (High School)|0": 0,
"community|arabic_mmlu:Physics (High School)|0": 0,
"community|arabic_mmlu:Political Science (University)|0": 0,
"community|arabic_mmlu:Social Science (Middle School)|0": 0,
"community|arabic_mmlu:Social Science (Primary School)|0": 0,
"community|arabic_mmlu_ht:abstract_algebra|0": 0,
"community|arabic_mmlu_ht:anatomy|0": 0,
"community|arabic_mmlu_ht:astronomy|0": 0,
"community|arabic_mmlu_ht:business_ethics|0": 0,
"community|arabic_mmlu_ht:clinical_knowledge|0": 0,
"community|arabic_mmlu_ht:college_biology|0": 0,
"community|arabic_mmlu_ht:college_chemistry|0": 0,
"community|arabic_mmlu_ht:college_computer_science|0": 0,
"community|arabic_mmlu_ht:college_mathematics|0": 0,
"community|arabic_mmlu_ht:college_medicine|0": 0,
"community|arabic_mmlu_ht:college_physics|0": 0,
"community|arabic_mmlu_ht:computer_security|0": 0,
"community|arabic_mmlu_ht:conceptual_physics|0": 0,
"community|arabic_mmlu_ht:econometrics|0": 0,
"community|arabic_mmlu_ht:electrical_engineering|0": 0,
"community|arabic_mmlu_ht:elementary_mathematics|0": 0,
"community|arabic_mmlu_ht:formal_logic|0": 0,
"community|arabic_mmlu_ht:global_facts|0": 0,
"community|arabic_mmlu_ht:high_school_biology|0": 0,
"community|arabic_mmlu_ht:high_school_chemistry|0": 0,
"community|arabic_mmlu_ht:high_school_computer_science|0": 0,
"community|arabic_mmlu_ht:high_school_european_history|0": 0,
"community|arabic_mmlu_ht:high_school_geography|0": 0,
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0,
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_mathematics|0": 0,
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0,
"community|arabic_mmlu_ht:high_school_physics|0": 0,
"community|arabic_mmlu_ht:high_school_psychology|0": 0,
"community|arabic_mmlu_ht:high_school_statistics|0": 0,
"community|arabic_mmlu_ht:high_school_us_history|0": 0,
"community|arabic_mmlu_ht:high_school_world_history|0": 0,
"community|arabic_mmlu_ht:human_aging|0": 0,
"community|arabic_mmlu_ht:human_sexuality|0": 0,
"community|arabic_mmlu_ht:international_law|0": 0,
"community|arabic_mmlu_ht:jurisprudence|0": 0,
"community|arabic_mmlu_ht:logical_fallacies|0": 0,
"community|arabic_mmlu_ht:machine_learning|0": 0,
"community|arabic_mmlu_ht:management|0": 0,
"community|arabic_mmlu_ht:marketing|0": 0,
"community|arabic_mmlu_ht:medical_genetics|0": 0,
"community|arabic_mmlu_ht:miscellaneous|0": 0,
"community|arabic_mmlu_ht:moral_disputes|0": 0,
"community|arabic_mmlu_ht:moral_scenarios|0": 0,
"community|arabic_mmlu_ht:nutrition|0": 0,
"community|arabic_mmlu_ht:philosophy|0": 0,
"community|arabic_mmlu_ht:prehistory|0": 0,
"community|arabic_mmlu_ht:professional_accounting|0": 0,
"community|arabic_mmlu_ht:professional_law|0": 0,
"community|arabic_mmlu_ht:professional_medicine|0": 0,
"community|arabic_mmlu_ht:professional_psychology|0": 0,
"community|arabic_mmlu_ht:public_relations|0": 0,
"community|arabic_mmlu_ht:security_studies|0": 0,
"community|arabic_mmlu_ht:sociology|0": 0,
"community|arabic_mmlu_ht:us_foreign_policy|0": 0,
"community|arabic_mmlu_ht:virology|0": 0,
"community|arabic_mmlu_ht:world_religions|0": 0,
"community|aratrust:Ethics|0": 0,
"community|aratrust:Illegal|0": 0,
"community|aratrust:MentalHealth|0": 0,
"community|aratrust:Offensive|0": 0,
"community|aratrust:PhysicalHealth|0": 0,
"community|aratrust:Privacy|0": 0,
"community|aratrust:Trustfulness|0": 0,
"community|aratrust:Unfairness|0": 0,
"community|madinah_qa:Arabic Language (General)|0": 0,
"community|madinah_qa:Arabic Language (Grammar)|0": 0,
"community|alrage_qa|0": 0
},
"config_tasks": {
"community|alghafa:mcq_exams_test_ar": {
"name": "alghafa:mcq_exams_test_ar",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "mcq_exams_test_ar",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 557,
"effective_num_docs": 557,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_dialects": {
"name": "alghafa:meta_ar_dialects",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_dialects",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5395,
"effective_num_docs": 5395,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:meta_ar_msa": {
"name": "alghafa:meta_ar_msa",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "meta_ar_msa",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_facts_truefalse_balanced_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task": {
"name": "alghafa:multiple_choice_grounded_statement_soqal_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_soqal_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 150,
"effective_num_docs": 150,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7995,
"effective_num_docs": 7995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task": {
"name": "alghafa:multiple_choice_rating_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_rating_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 5995,
"effective_num_docs": 5995,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alghafa:multiple_choice_sentiment_task": {
"name": "alghafa:multiple_choice_sentiment_task",
"prompt_function": "alghafa_pfn",
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
"hf_subset": "multiple_choice_sentiment_task",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1720,
"effective_num_docs": 1720,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_exams": {
"name": "arabic_exams",
"prompt_function": "arabic_exams_pfn",
"hf_repo": "OALL/Arabic_EXAMS",
"hf_subset": "default",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test",
"validation"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": "validation",
"few_shots_select": "sequential",
"generation_size": null,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 537,
"effective_num_docs": 537,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Accounting (University)": {
"name": "arabic_mmlu:Accounting (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Accounting (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 74,
"effective_num_docs": 74,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (General)": {
"name": "arabic_mmlu:Arabic Language (General)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)": {
"name": "arabic_mmlu:Arabic Language (Grammar)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (High School)": {
"name": "arabic_mmlu:Arabic Language (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 390,
"effective_num_docs": 390,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)": {
"name": "arabic_mmlu:Arabic Language (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)": {
"name": "arabic_mmlu:Arabic Language (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Arabic Language (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 252,
"effective_num_docs": 252,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Biology (High School)": {
"name": "arabic_mmlu:Biology (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Biology (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1409,
"effective_num_docs": 1409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (High School)": {
"name": "arabic_mmlu:Civics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Civics (Middle School)": {
"name": "arabic_mmlu:Civics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Civics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 236,
"effective_num_docs": 236,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (High School)": {
"name": "arabic_mmlu:Computer Science (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 261,
"effective_num_docs": 261,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Middle School)": {
"name": "arabic_mmlu:Computer Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 27,
"effective_num_docs": 27,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (Primary School)": {
"name": "arabic_mmlu:Computer Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 190,
"effective_num_docs": 190,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Computer Science (University)": {
"name": "arabic_mmlu:Computer Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Computer Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 64,
"effective_num_docs": 64,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Driving Test": {
"name": "arabic_mmlu:Driving Test",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Driving Test",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1211,
"effective_num_docs": 1211,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (High School)": {
"name": "arabic_mmlu:Economics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 360,
"effective_num_docs": 360,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (Middle School)": {
"name": "arabic_mmlu:Economics (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 87,
"effective_num_docs": 87,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Economics (University)": {
"name": "arabic_mmlu:Economics (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Economics (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 137,
"effective_num_docs": 137,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge": {
"name": "arabic_mmlu:General Knowledge",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 864,
"effective_num_docs": 864,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)": {
"name": "arabic_mmlu:General Knowledge (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 172,
"effective_num_docs": 172,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)": {
"name": "arabic_mmlu:General Knowledge (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "General Knowledge (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 162,
"effective_num_docs": 162,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (High School)": {
"name": "arabic_mmlu:Geography (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1038,
"effective_num_docs": 1038,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Middle School)": {
"name": "arabic_mmlu:Geography (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 272,
"effective_num_docs": 272,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Geography (Primary School)": {
"name": "arabic_mmlu:Geography (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Geography (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (High School)": {
"name": "arabic_mmlu:History (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 760,
"effective_num_docs": 760,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Middle School)": {
"name": "arabic_mmlu:History (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 203,
"effective_num_docs": 203,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:History (Primary School)": {
"name": "arabic_mmlu:History (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "History (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies": {
"name": "arabic_mmlu:Islamic Studies",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 639,
"effective_num_docs": 639,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (High School)": {
"name": "arabic_mmlu:Islamic Studies (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 334,
"effective_num_docs": 334,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)": {
"name": "arabic_mmlu:Islamic Studies (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)": {
"name": "arabic_mmlu:Islamic Studies (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Islamic Studies (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 999,
"effective_num_docs": 999,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Law (Professional)": {
"name": "arabic_mmlu:Law (Professional)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Law (Professional)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 314,
"effective_num_docs": 314,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Management (University)": {
"name": "arabic_mmlu:Management (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Management (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 75,
"effective_num_docs": 75,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Math (Primary School)": {
"name": "arabic_mmlu:Math (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Math (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 409,
"effective_num_docs": 409,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Middle School)": {
"name": "arabic_mmlu:Natural Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 242,
"effective_num_docs": 242,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Natural Science (Primary School)": {
"name": "arabic_mmlu:Natural Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Natural Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 336,
"effective_num_docs": 336,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Philosophy (High School)": {
"name": "arabic_mmlu:Philosophy (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Philosophy (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 39,
"effective_num_docs": 39,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Physics (High School)": {
"name": "arabic_mmlu:Physics (High School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Physics (High School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 255,
"effective_num_docs": 255,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Political Science (University)": {
"name": "arabic_mmlu:Political Science (University)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Political Science (University)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Middle School)": {
"name": "arabic_mmlu:Social Science (Middle School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Middle School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 241,
"effective_num_docs": 241,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu:Social Science (Primary School)": {
"name": "arabic_mmlu:Social Science (Primary School)",
"prompt_function": "arabic_mmlu_pfn",
"hf_repo": "MBZUAI/ArabicMMLU",
"hf_subset": "Social Science (Primary School)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 705,
"effective_num_docs": 705,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:abstract_algebra": {
"name": "arabic_mmlu_ht:abstract_algebra",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "abstract_algebra",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:anatomy": {
"name": "arabic_mmlu_ht:anatomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "anatomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 135,
"effective_num_docs": 135,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:astronomy": {
"name": "arabic_mmlu_ht:astronomy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "astronomy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 152,
"effective_num_docs": 152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:business_ethics": {
"name": "arabic_mmlu_ht:business_ethics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "business_ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:clinical_knowledge": {
"name": "arabic_mmlu_ht:clinical_knowledge",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "clinical_knowledge",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 265,
"effective_num_docs": 265,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_biology": {
"name": "arabic_mmlu_ht:college_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 144,
"effective_num_docs": 144,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_chemistry": {
"name": "arabic_mmlu_ht:college_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_computer_science": {
"name": "arabic_mmlu_ht:college_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_mathematics": {
"name": "arabic_mmlu_ht:college_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_medicine": {
"name": "arabic_mmlu_ht:college_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 173,
"effective_num_docs": 173,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:college_physics": {
"name": "arabic_mmlu_ht:college_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "college_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 102,
"effective_num_docs": 102,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:computer_security": {
"name": "arabic_mmlu_ht:computer_security",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "computer_security",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:conceptual_physics": {
"name": "arabic_mmlu_ht:conceptual_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "conceptual_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:econometrics": {
"name": "arabic_mmlu_ht:econometrics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "econometrics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 114,
"effective_num_docs": 114,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:electrical_engineering": {
"name": "arabic_mmlu_ht:electrical_engineering",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "electrical_engineering",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 145,
"effective_num_docs": 145,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:elementary_mathematics": {
"name": "arabic_mmlu_ht:elementary_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "elementary_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 378,
"effective_num_docs": 378,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:formal_logic": {
"name": "arabic_mmlu_ht:formal_logic",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "formal_logic",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 126,
"effective_num_docs": 126,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:global_facts": {
"name": "arabic_mmlu_ht:global_facts",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "global_facts",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_biology": {
"name": "arabic_mmlu_ht:high_school_biology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_biology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3813,
"effective_num_docs": 3813,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_chemistry": {
"name": "arabic_mmlu_ht:high_school_chemistry",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_chemistry",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4016,
"effective_num_docs": 4016,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_computer_science": {
"name": "arabic_mmlu_ht:high_school_computer_science",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_computer_science",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_european_history": {
"name": "arabic_mmlu_ht:high_school_european_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_european_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 8152,
"effective_num_docs": 8152,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_geography": {
"name": "arabic_mmlu_ht:high_school_geography",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_geography",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 198,
"effective_num_docs": 198,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics": {
"name": "arabic_mmlu_ht:high_school_government_and_politics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_government_and_politics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 193,
"effective_num_docs": 193,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics": {
"name": "arabic_mmlu_ht:high_school_macroeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_macroeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2891,
"effective_num_docs": 2891,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_mathematics": {
"name": "arabic_mmlu_ht:high_school_mathematics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_mathematics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 270,
"effective_num_docs": 270,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics": {
"name": "arabic_mmlu_ht:high_school_microeconomics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_microeconomics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 238,
"effective_num_docs": 238,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_physics": {
"name": "arabic_mmlu_ht:high_school_physics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_physics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 151,
"effective_num_docs": 151,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_psychology": {
"name": "arabic_mmlu_ht:high_school_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 545,
"effective_num_docs": 545,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_statistics": {
"name": "arabic_mmlu_ht:high_school_statistics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_statistics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4232,
"effective_num_docs": 4232,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_us_history": {
"name": "arabic_mmlu_ht:high_school_us_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_us_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 204,
"effective_num_docs": 204,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:high_school_world_history": {
"name": "arabic_mmlu_ht:high_school_world_history",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "high_school_world_history",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 237,
"effective_num_docs": 237,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_aging": {
"name": "arabic_mmlu_ht:human_aging",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_aging",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 223,
"effective_num_docs": 223,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:human_sexuality": {
"name": "arabic_mmlu_ht:human_sexuality",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "human_sexuality",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 131,
"effective_num_docs": 131,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:international_law": {
"name": "arabic_mmlu_ht:international_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "international_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 121,
"effective_num_docs": 121,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:jurisprudence": {
"name": "arabic_mmlu_ht:jurisprudence",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "jurisprudence",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 108,
"effective_num_docs": 108,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:logical_fallacies": {
"name": "arabic_mmlu_ht:logical_fallacies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "logical_fallacies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 163,
"effective_num_docs": 163,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:machine_learning": {
"name": "arabic_mmlu_ht:machine_learning",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "machine_learning",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 112,
"effective_num_docs": 112,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:management": {
"name": "arabic_mmlu_ht:management",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "management",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 103,
"effective_num_docs": 103,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:marketing": {
"name": "arabic_mmlu_ht:marketing",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "marketing",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 234,
"effective_num_docs": 234,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:medical_genetics": {
"name": "arabic_mmlu_ht:medical_genetics",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "medical_genetics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:miscellaneous": {
"name": "arabic_mmlu_ht:miscellaneous",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "miscellaneous",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2420,
"effective_num_docs": 2420,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_disputes": {
"name": "arabic_mmlu_ht:moral_disputes",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_disputes",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 346,
"effective_num_docs": 346,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:moral_scenarios": {
"name": "arabic_mmlu_ht:moral_scenarios",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "moral_scenarios",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 895,
"effective_num_docs": 895,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:nutrition": {
"name": "arabic_mmlu_ht:nutrition",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "nutrition",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:philosophy": {
"name": "arabic_mmlu_ht:philosophy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "philosophy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 311,
"effective_num_docs": 311,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:prehistory": {
"name": "arabic_mmlu_ht:prehistory",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "prehistory",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 324,
"effective_num_docs": 324,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_accounting": {
"name": "arabic_mmlu_ht:professional_accounting",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_accounting",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 4514,
"effective_num_docs": 4514,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_law": {
"name": "arabic_mmlu_ht:professional_law",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_law",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 7987,
"effective_num_docs": 7987,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_medicine": {
"name": "arabic_mmlu_ht:professional_medicine",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_medicine",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 1637,
"effective_num_docs": 1637,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:professional_psychology": {
"name": "arabic_mmlu_ht:professional_psychology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "professional_psychology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 3503,
"effective_num_docs": 3503,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:public_relations": {
"name": "arabic_mmlu_ht:public_relations",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "public_relations",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 110,
"effective_num_docs": 110,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:security_studies": {
"name": "arabic_mmlu_ht:security_studies",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "security_studies",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 245,
"effective_num_docs": 245,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:sociology": {
"name": "arabic_mmlu_ht:sociology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "sociology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 201,
"effective_num_docs": 201,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:us_foreign_policy": {
"name": "arabic_mmlu_ht:us_foreign_policy",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "us_foreign_policy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 100,
"effective_num_docs": 100,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:virology": {
"name": "arabic_mmlu_ht:virology",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "virology",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 166,
"effective_num_docs": 166,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|arabic_mmlu_ht:world_religions": {
"name": "arabic_mmlu_ht:world_religions",
"prompt_function": "arabic_mmlu_ht_pfn",
"hf_repo": "MBZUAI/human_translated_arabic_mmlu",
"hf_subset": "world_religions",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 171,
"effective_num_docs": 171,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Ethics": {
"name": "aratrust:Ethics",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Ethics",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 60,
"effective_num_docs": 60,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Illegal": {
"name": "aratrust:Illegal",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Illegal",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 53,
"effective_num_docs": 53,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:MentalHealth": {
"name": "aratrust:MentalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "MentalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 76,
"effective_num_docs": 76,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Offensive": {
"name": "aratrust:Offensive",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Offensive",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 69,
"effective_num_docs": 69,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:PhysicalHealth": {
"name": "aratrust:PhysicalHealth",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "PhysicalHealth",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 73,
"effective_num_docs": 73,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Privacy": {
"name": "aratrust:Privacy",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Privacy",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 57,
"effective_num_docs": 57,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Trustfulness": {
"name": "aratrust:Trustfulness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Trustfulness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 78,
"effective_num_docs": 78,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|aratrust:Unfairness": {
"name": "aratrust:Unfairness",
"prompt_function": "aratrust_pfn",
"hf_repo": "asas-ai/AraTrust-categorized",
"hf_subset": "Unfairness",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 55,
"effective_num_docs": 55,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (General)": {
"name": "madinah_qa:Arabic Language (General)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (General)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 612,
"effective_num_docs": 612,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|madinah_qa:Arabic Language (Grammar)": {
"name": "madinah_qa:Arabic Language (Grammar)",
"prompt_function": "madinah_qa_pfn",
"hf_repo": "MBZUAI/MadinahQA",
"hf_subset": "Arabic Language (Grammar)",
"metric": [
{
"metric_name": "acc_norm",
"higher_is_better": true,
"category": "8",
"use_case": "1",
"sample_level_fn": "compute",
"corpus_level_fn": "mean"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"test"
],
"trust_dataset": true,
"evaluation_splits": [
"test"
],
"few_shots_split": [
"dev"
],
"few_shots_select": "sequential",
"generation_size": -1,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 365,
"effective_num_docs": 365,
"must_remove_duplicate_docs": false,
"version": 0
},
"community|alrage_qa": {
"name": "alrage_qa",
"prompt_function": "qa_prompt_arabic",
"hf_repo": "OALL/ALRAGE",
"hf_subset": null,
"metric": [
{
"metric_name": "llm_as_judge",
"higher_is_better": true,
"category": "7",
"use_case": "10",
"sample_level_fn": "_sample_level_fn",
"corpus_level_fn": "aggregate_scores"
}
],
"hf_revision": null,
"hf_filter": null,
"hf_avail_splits": [
"train"
],
"trust_dataset": true,
"evaluation_splits": [
"train"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 200,
"generation_grammar": null,
"stop_sequence": [],
"num_samples": null,
"suite": [
"community"
],
"original_num_docs": 2106,
"effective_num_docs": 2106,
"must_remove_duplicate_docs": false,
"version": 0
}
},
"summary_tasks": {
"community|alghafa:mcq_exams_test_ar|0": {
"hashes": {
"hash_examples": "c07a5e78c5c0b8fe",
"hash_full_prompts": "34da269ef01f8f35",
"hash_input_tokens": "6923d5bb0fe08d58",
"hash_cont_tokens": "d6519f4be64b0b3f"
},
"truncated": 0,
"non_truncated": 557,
"padded": 2228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_dialects|0": {
"hashes": {
"hash_examples": "c0b6081f83e14064",
"hash_full_prompts": "61fad94670ba0541",
"hash_input_tokens": "b01988bda81b3bac",
"hash_cont_tokens": "fe549766a0ce738a"
},
"truncated": 0,
"non_truncated": 5395,
"padded": 21580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:meta_ar_msa|0": {
"hashes": {
"hash_examples": "64eb78a7c5b7484b",
"hash_full_prompts": "b6672c11a56c4763",
"hash_input_tokens": "c893d3f9c4b8ef99",
"hash_cont_tokens": "0349d70fe949b783"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3580,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
"hashes": {
"hash_examples": "54fc3502c1c02c06",
"hash_full_prompts": "f671003d511ab7cc",
"hash_input_tokens": "b064fd88389c83f9",
"hash_cont_tokens": "65b9e8001c0d9d5e"
},
"truncated": 0,
"non_truncated": 75,
"padded": 150,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
"hashes": {
"hash_examples": "46572d83696552ae",
"hash_full_prompts": "04c86efa47d50e26",
"hash_input_tokens": "ba18044e4376ab4f",
"hash_cont_tokens": "707d382e7333be99"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
"hashes": {
"hash_examples": "f430d97ff715bc1c",
"hash_full_prompts": "3d3086e874381a32",
"hash_input_tokens": "8da72eadd510888d",
"hash_cont_tokens": "b2739963cb832e04"
},
"truncated": 0,
"non_truncated": 150,
"padded": 750,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
"hashes": {
"hash_examples": "6b70a7416584f98c",
"hash_full_prompts": "a485a541cb0cbd4f",
"hash_input_tokens": "a31fa255fe1b99f8",
"hash_cont_tokens": "b5f274f703abc5b7"
},
"truncated": 0,
"non_truncated": 7995,
"padded": 15990,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_rating_sentiment_task|0": {
"hashes": {
"hash_examples": "bc2005cc9d2f436e",
"hash_full_prompts": "11bb76ccb936df22",
"hash_input_tokens": "a7997ef542efd420",
"hash_cont_tokens": "87c18aae5cfd812b"
},
"truncated": 0,
"non_truncated": 5995,
"padded": 17921,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alghafa:multiple_choice_sentiment_task|0": {
"hashes": {
"hash_examples": "6fb0e254ea5945d8",
"hash_full_prompts": "0f6e11a7f1a6334d",
"hash_input_tokens": "073d4fea5ff1a3ce",
"hash_cont_tokens": "8e03dcc04ffbd0bd"
},
"truncated": 0,
"non_truncated": 1720,
"padded": 5096,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_exams|0": {
"hashes": {
"hash_examples": "6d721df351722656",
"hash_full_prompts": "913ee0eebf742d26",
"hash_input_tokens": "252136dfbde42904",
"hash_cont_tokens": "a4ef65bc7bab8dcf"
},
"truncated": 0,
"non_truncated": 537,
"padded": 2096,
"non_padded": 52,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Accounting (University)|0": {
"hashes": {
"hash_examples": "30e09697562ff9e7",
"hash_full_prompts": "014f8382a5a07c61",
"hash_input_tokens": "fd0d54e7088d35da",
"hash_cont_tokens": "587bf4caea1658f4"
},
"truncated": 0,
"non_truncated": 74,
"padded": 256,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "a56fdc3a4fd4a26b",
"hash_input_tokens": "96cbacb17237e503",
"hash_cont_tokens": "95a234c727b7b43c"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2400,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "a645c5a3387f2ec9",
"hash_input_tokens": "d5a70ed114eafdbd",
"hash_cont_tokens": "98f874e7446c544b"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1545,
"non_padded": 43,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (High School)|0": {
"hashes": {
"hash_examples": "a9c2cd9a9929292a",
"hash_full_prompts": "556559cb13dae610",
"hash_input_tokens": "56a4b65fc520ae1e",
"hash_cont_tokens": "3d676d0d2f081e05"
},
"truncated": 0,
"non_truncated": 390,
"padded": 1505,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Middle School)|0": {
"hashes": {
"hash_examples": "2f8a77bbbd0e21ff",
"hash_full_prompts": "f986f15dbc7e9100",
"hash_input_tokens": "e5baa9ef2fe6629c",
"hash_cont_tokens": "322ea7667dfc2c2d"
},
"truncated": 0,
"non_truncated": 27,
"padded": 105,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Arabic Language (Primary School)|0": {
"hashes": {
"hash_examples": "5eed3da47822539b",
"hash_full_prompts": "a531f0be2d434100",
"hash_input_tokens": "72d4b5a96c18d83b",
"hash_cont_tokens": "f3c78f80ddea1519"
},
"truncated": 0,
"non_truncated": 252,
"padded": 918,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Biology (High School)|0": {
"hashes": {
"hash_examples": "91ae6d22a0f0213d",
"hash_full_prompts": "8f7200111c3bda2f",
"hash_input_tokens": "f17ea151f736a9ab",
"hash_cont_tokens": "aaa20fdc3c06d2c3"
},
"truncated": 0,
"non_truncated": 1409,
"padded": 4968,
"non_padded": 88,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (High School)|0": {
"hashes": {
"hash_examples": "f27bf8791bea2bb9",
"hash_full_prompts": "b4d0b0deb74ca875",
"hash_input_tokens": "8608580ebc7443af",
"hash_cont_tokens": "e02c7ebfec7f8df8"
},
"truncated": 0,
"non_truncated": 87,
"padded": 312,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Civics (Middle School)|0": {
"hashes": {
"hash_examples": "74f5bb0098c8916f",
"hash_full_prompts": "0f5c26bab97f062e",
"hash_input_tokens": "323143117e557144",
"hash_cont_tokens": "1ffdd9a463183bfa"
},
"truncated": 0,
"non_truncated": 236,
"padded": 940,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (High School)|0": {
"hashes": {
"hash_examples": "a4278d7b525d46fe",
"hash_full_prompts": "6d031feae91089b4",
"hash_input_tokens": "744c0cbb36309d58",
"hash_cont_tokens": "821feca3d9004c98"
},
"truncated": 0,
"non_truncated": 261,
"padded": 994,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Middle School)|0": {
"hashes": {
"hash_examples": "0cb6c07e4b80dfd4",
"hash_full_prompts": "833911f410f06e26",
"hash_input_tokens": "9b724f122a74dca9",
"hash_cont_tokens": "8b4f299b6f012a83"
},
"truncated": 0,
"non_truncated": 27,
"padded": 100,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (Primary School)|0": {
"hashes": {
"hash_examples": "d96fc1bc32473533",
"hash_full_prompts": "f7dfadae4451a309",
"hash_input_tokens": "1dcb7b47c7198941",
"hash_cont_tokens": "1bc67f97b48b9ece"
},
"truncated": 0,
"non_truncated": 190,
"padded": 476,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Computer Science (University)|0": {
"hashes": {
"hash_examples": "8835587e436cbaff",
"hash_full_prompts": "3bd4a2b00782ff99",
"hash_input_tokens": "09e8f06b285bced1",
"hash_cont_tokens": "e9d871459bc85f62"
},
"truncated": 0,
"non_truncated": 64,
"padded": 247,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Driving Test|0": {
"hashes": {
"hash_examples": "7a4c38a2c451d075",
"hash_full_prompts": "432155ed8cf8b2e3",
"hash_input_tokens": "159f8d1290fee377",
"hash_cont_tokens": "cd411982b0f12d43"
},
"truncated": 0,
"non_truncated": 1211,
"padded": 3606,
"non_padded": 79,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (High School)|0": {
"hashes": {
"hash_examples": "c04c252836601279",
"hash_full_prompts": "60152ffac1e648c7",
"hash_input_tokens": "292945a13d8865c7",
"hash_cont_tokens": "4bda66df90f2d4d8"
},
"truncated": 0,
"non_truncated": 360,
"padded": 1374,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (Middle School)|0": {
"hashes": {
"hash_examples": "18fba1579406b3cc",
"hash_full_prompts": "fe97d6515b8ecbc2",
"hash_input_tokens": "75a611b2f1a6c070",
"hash_cont_tokens": "3ea283b0f50a72f5"
},
"truncated": 0,
"non_truncated": 87,
"padded": 344,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Economics (University)|0": {
"hashes": {
"hash_examples": "7c9e86fba8151562",
"hash_full_prompts": "12c31588a7785336",
"hash_input_tokens": "ca69bf384bfd42ec",
"hash_cont_tokens": "91cdb256248a5bdf"
},
"truncated": 0,
"non_truncated": 137,
"padded": 532,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge|0": {
"hashes": {
"hash_examples": "acfbe4e1f0314b85",
"hash_full_prompts": "d777e26be5ef6429",
"hash_input_tokens": "5ad6e00acd92f2f3",
"hash_cont_tokens": "76d704fbedbe5ab8"
},
"truncated": 0,
"non_truncated": 864,
"padded": 3169,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Middle School)|0": {
"hashes": {
"hash_examples": "03cd0ecf10224316",
"hash_full_prompts": "91f3146f8965b457",
"hash_input_tokens": "9b8cd4b7ff839d8d",
"hash_cont_tokens": "aff2aed9268be2e2"
},
"truncated": 0,
"non_truncated": 172,
"padded": 607,
"non_padded": 21,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:General Knowledge (Primary School)|0": {
"hashes": {
"hash_examples": "c3ee30196e05e122",
"hash_full_prompts": "8712e31ee1abdc1f",
"hash_input_tokens": "61d2328b056d80af",
"hash_cont_tokens": "6c8978669cdc11fb"
},
"truncated": 0,
"non_truncated": 162,
"padded": 629,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (High School)|0": {
"hashes": {
"hash_examples": "e2e329d2bdd9fb7b",
"hash_full_prompts": "4172b74f530012d1",
"hash_input_tokens": "74dea1c841be128f",
"hash_cont_tokens": "37e2e9c548d8c904"
},
"truncated": 0,
"non_truncated": 1038,
"padded": 4052,
"non_padded": 64,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Middle School)|0": {
"hashes": {
"hash_examples": "420b161444291989",
"hash_full_prompts": "e8a15bb69931c97f",
"hash_input_tokens": "53f0d9ce48fbfb4a",
"hash_cont_tokens": "5e24bb4c8be23901"
},
"truncated": 0,
"non_truncated": 272,
"padded": 966,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Geography (Primary School)|0": {
"hashes": {
"hash_examples": "5bc5ca48a4210899",
"hash_full_prompts": "515820199d698444",
"hash_input_tokens": "dd0eccd5d277d6ca",
"hash_cont_tokens": "b9f45957a97d1ecf"
},
"truncated": 0,
"non_truncated": 57,
"padded": 216,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (High School)|0": {
"hashes": {
"hash_examples": "c7cc37f29311bea1",
"hash_full_prompts": "8265a8ac74249261",
"hash_input_tokens": "a63a60de6993d266",
"hash_cont_tokens": "b16e65544485acae"
},
"truncated": 0,
"non_truncated": 760,
"padded": 2886,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Middle School)|0": {
"hashes": {
"hash_examples": "5b9f1973337153a2",
"hash_full_prompts": "807d9b29253a2d4b",
"hash_input_tokens": "e8ac2a1d4966717b",
"hash_cont_tokens": "e3b355a58a286ee4"
},
"truncated": 0,
"non_truncated": 203,
"padded": 734,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:History (Primary School)|0": {
"hashes": {
"hash_examples": "af2469847007c1fe",
"hash_full_prompts": "b3af283f338ba0e1",
"hash_input_tokens": "3eeeed4796250963",
"hash_cont_tokens": "a52a22630c3cb3f7"
},
"truncated": 0,
"non_truncated": 102,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies|0": {
"hashes": {
"hash_examples": "c8da9b2f16a5ea0f",
"hash_full_prompts": "2661393fe36f1ba5",
"hash_input_tokens": "77e9ec7b54a0be43",
"hash_cont_tokens": "1866597a67ff4424"
},
"truncated": 0,
"non_truncated": 639,
"padded": 2493,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (High School)|0": {
"hashes": {
"hash_examples": "efb11bc8ef398117",
"hash_full_prompts": "b0bee4a17b50c2f3",
"hash_input_tokens": "5e781abbbd5f3722",
"hash_cont_tokens": "6b678abb2fd451bd"
},
"truncated": 0,
"non_truncated": 334,
"padded": 1281,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Middle School)|0": {
"hashes": {
"hash_examples": "9e33ab030eebdb99",
"hash_full_prompts": "f425f870e9f35d88",
"hash_input_tokens": "3202b8f5803bdaec",
"hash_cont_tokens": "e0c922e595ad51cd"
},
"truncated": 0,
"non_truncated": 238,
"padded": 867,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Islamic Studies (Primary School)|0": {
"hashes": {
"hash_examples": "4167565d878b20eb",
"hash_full_prompts": "78a6a816c859d681",
"hash_input_tokens": "c0805d905c37c643",
"hash_cont_tokens": "97e2182a63c5686c"
},
"truncated": 0,
"non_truncated": 999,
"padded": 2969,
"non_padded": 55,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Law (Professional)|0": {
"hashes": {
"hash_examples": "e77f52c8fe4352b3",
"hash_full_prompts": "f2150d3a3d7fef4e",
"hash_input_tokens": "fbba3a0468816760",
"hash_cont_tokens": "324cc46c561b417c"
},
"truncated": 0,
"non_truncated": 314,
"padded": 1223,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Management (University)|0": {
"hashes": {
"hash_examples": "09682649b04b7327",
"hash_full_prompts": "846428f541275e8a",
"hash_input_tokens": "cdcff5b9faff9042",
"hash_cont_tokens": "1e98e1e2cd19a5e3"
},
"truncated": 0,
"non_truncated": 75,
"padded": 200,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Math (Primary School)|0": {
"hashes": {
"hash_examples": "edb027bfae7e76f1",
"hash_full_prompts": "66c9ce547e0542b4",
"hash_input_tokens": "2d1ef9e9ea905c17",
"hash_cont_tokens": "632401a080490684"
},
"truncated": 0,
"non_truncated": 409,
"padded": 1290,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Middle School)|0": {
"hashes": {
"hash_examples": "96e72c9094c2364c",
"hash_full_prompts": "ebfcce496ef1dae9",
"hash_input_tokens": "1ab5edc99e1e4017",
"hash_cont_tokens": "17e42af5dbb9eee1"
},
"truncated": 0,
"non_truncated": 242,
"padded": 924,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Natural Science (Primary School)|0": {
"hashes": {
"hash_examples": "69e35bad3dec5a4d",
"hash_full_prompts": "347440e2faadc755",
"hash_input_tokens": "9cc83b51545fb956",
"hash_cont_tokens": "a7423721c9837336"
},
"truncated": 0,
"non_truncated": 336,
"padded": 1206,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Philosophy (High School)|0": {
"hashes": {
"hash_examples": "dc6ebd484a02fca5",
"hash_full_prompts": "36346c36d96b7742",
"hash_input_tokens": "b10f293b6c3f00b5",
"hash_cont_tokens": "69b31fc6977897bf"
},
"truncated": 0,
"non_truncated": 39,
"padded": 156,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Physics (High School)|0": {
"hashes": {
"hash_examples": "58a1722472c9e644",
"hash_full_prompts": "aa53ca80de338b24",
"hash_input_tokens": "014e20b796435f24",
"hash_cont_tokens": "f9506aa86f66954d"
},
"truncated": 0,
"non_truncated": 255,
"padded": 996,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Political Science (University)|0": {
"hashes": {
"hash_examples": "07a4ed6aabbdfd1e",
"hash_full_prompts": "fbd170954ef7b83d",
"hash_input_tokens": "be0ec1f4020682bf",
"hash_cont_tokens": "4799b66f49438465"
},
"truncated": 0,
"non_truncated": 210,
"padded": 688,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Middle School)|0": {
"hashes": {
"hash_examples": "8ca955902f304664",
"hash_full_prompts": "0dc9370d58faf348",
"hash_input_tokens": "8a67fb1df3af3a28",
"hash_cont_tokens": "4602cb88db99312d"
},
"truncated": 0,
"non_truncated": 241,
"padded": 919,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu:Social Science (Primary School)|0": {
"hashes": {
"hash_examples": "934025ab3738123c",
"hash_full_prompts": "c5d972f5b1007ee1",
"hash_input_tokens": "071f3c2d3a329c65",
"hash_cont_tokens": "19e973e9f05c9c82"
},
"truncated": 0,
"non_truncated": 705,
"padded": 2004,
"non_padded": 39,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:abstract_algebra|0": {
"hashes": {
"hash_examples": "0b557911f2f6d919",
"hash_full_prompts": "ab1666c18f658f17",
"hash_input_tokens": "5079eae0132f9572",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:anatomy|0": {
"hashes": {
"hash_examples": "a552d8a0ef294061",
"hash_full_prompts": "a1b8457af2bd2730",
"hash_input_tokens": "c32c8d70e09bc26c",
"hash_cont_tokens": "96c000fa61c3bd55"
},
"truncated": 0,
"non_truncated": 135,
"padded": 532,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:astronomy|0": {
"hashes": {
"hash_examples": "c4a372d0af7da098",
"hash_full_prompts": "11384879a4089109",
"hash_input_tokens": "fcbbfb05e4b93f9e",
"hash_cont_tokens": "b13cc32205751d90"
},
"truncated": 0,
"non_truncated": 152,
"padded": 604,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:business_ethics|0": {
"hashes": {
"hash_examples": "9f71d816abf8af7a",
"hash_full_prompts": "ddf9e4b521d07cbb",
"hash_input_tokens": "cf8d21a7f4ff0969",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:clinical_knowledge|0": {
"hashes": {
"hash_examples": "38303cd765589ef3",
"hash_full_prompts": "c939850c18c1f485",
"hash_input_tokens": "9045f4eaad5458f9",
"hash_cont_tokens": "c771582839d4f30c"
},
"truncated": 0,
"non_truncated": 265,
"padded": 1028,
"non_padded": 32,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_biology|0": {
"hashes": {
"hash_examples": "dbd9b5d318e60b04",
"hash_full_prompts": "ac205b3759b3a9a4",
"hash_input_tokens": "a1311e4adca9b882",
"hash_cont_tokens": "ec774ac0d0ad658b"
},
"truncated": 0,
"non_truncated": 144,
"padded": 572,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_chemistry|0": {
"hashes": {
"hash_examples": "6f88491d03db8a4c",
"hash_full_prompts": "159a67ca2e736fab",
"hash_input_tokens": "f85290c69697e465",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_computer_science|0": {
"hashes": {
"hash_examples": "ebfdee5ef2ed5e17",
"hash_full_prompts": "d0dc6217b8cfb3bd",
"hash_input_tokens": "ef316d9cb195026e",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_mathematics|0": {
"hashes": {
"hash_examples": "e3f22cd7712aae2f",
"hash_full_prompts": "3bc7e5011e0aa6af",
"hash_input_tokens": "4bbae0d48aa22f2a",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_medicine|0": {
"hashes": {
"hash_examples": "51a5501373afb5a7",
"hash_full_prompts": "0311a774622dfa8f",
"hash_input_tokens": "e161e1c9df0da661",
"hash_cont_tokens": "1823a754e6394181"
},
"truncated": 0,
"non_truncated": 173,
"padded": 680,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:college_physics|0": {
"hashes": {
"hash_examples": "2d3e015989b108db",
"hash_full_prompts": "877dc1bc05688081",
"hash_input_tokens": "f7166f28b9296efa",
"hash_cont_tokens": "ee5dc873d27b9e10"
},
"truncated": 0,
"non_truncated": 102,
"padded": 404,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:computer_security|0": {
"hashes": {
"hash_examples": "f8810eddc38dfee4",
"hash_full_prompts": "1e3a2c34fa2e6065",
"hash_input_tokens": "fcf4df2c7578071d",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 396,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:conceptual_physics|0": {
"hashes": {
"hash_examples": "211e32cc43c6b1dc",
"hash_full_prompts": "dd90bd4c0e043021",
"hash_input_tokens": "7209354300868dcf",
"hash_cont_tokens": "b7b580bbcf7e0afa"
},
"truncated": 0,
"non_truncated": 235,
"padded": 896,
"non_padded": 44,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:econometrics|0": {
"hashes": {
"hash_examples": "810023786b2484d2",
"hash_full_prompts": "49dc7af5e2fff251",
"hash_input_tokens": "c1cbfe2f6ee82b62",
"hash_cont_tokens": "d44932b2a931e093"
},
"truncated": 0,
"non_truncated": 114,
"padded": 452,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:electrical_engineering|0": {
"hashes": {
"hash_examples": "a222760c93eaa1ee",
"hash_full_prompts": "9be471685d280ff2",
"hash_input_tokens": "e54d895cc5de8a32",
"hash_cont_tokens": "159f4cb1232d2a3c"
},
"truncated": 0,
"non_truncated": 145,
"padded": 544,
"non_padded": 36,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:elementary_mathematics|0": {
"hashes": {
"hash_examples": "4c069aeee64dc227",
"hash_full_prompts": "d0598901df4b6269",
"hash_input_tokens": "5bac0a913f1b3894",
"hash_cont_tokens": "2bf44b70baf49dfa"
},
"truncated": 0,
"non_truncated": 378,
"padded": 1500,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:formal_logic|0": {
"hashes": {
"hash_examples": "3cb0ccbf8e8a77ae",
"hash_full_prompts": "9e96fb012b599a96",
"hash_input_tokens": "592697e629ae9267",
"hash_cont_tokens": "8765c45f4711ebb8"
},
"truncated": 0,
"non_truncated": 126,
"padded": 504,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:global_facts|0": {
"hashes": {
"hash_examples": "c1d039e64ea321b9",
"hash_full_prompts": "e4544b657c5cfaea",
"hash_input_tokens": "7c1b792505c2903b",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 388,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_biology|0": {
"hashes": {
"hash_examples": "ddcb8237bb4ba08a",
"hash_full_prompts": "bd321c6f2d140a86",
"hash_input_tokens": "08cd6adecbfb143b",
"hash_cont_tokens": "49908817551a4513"
},
"truncated": 0,
"non_truncated": 3813,
"padded": 15100,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_chemistry|0": {
"hashes": {
"hash_examples": "07061b55c5c436d9",
"hash_full_prompts": "00fece3fc3f4de3c",
"hash_input_tokens": "6b73c4463988a724",
"hash_cont_tokens": "a7f16a586e1cfe0f"
},
"truncated": 0,
"non_truncated": 4016,
"padded": 15912,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_computer_science|0": {
"hashes": {
"hash_examples": "8d3405483d5fdcff",
"hash_full_prompts": "13071966e25b866c",
"hash_input_tokens": "42669e64f9dfb8f2",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 400,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_european_history|0": {
"hashes": {
"hash_examples": "031c49a430356414",
"hash_full_prompts": "b56bb1e7116e262d",
"hash_input_tokens": "a78b066120563f71",
"hash_cont_tokens": "5420388845898571"
},
"truncated": 0,
"non_truncated": 8152,
"padded": 32448,
"non_padded": 160,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_geography|0": {
"hashes": {
"hash_examples": "d0ce2b019a66c1de",
"hash_full_prompts": "2f2cb46ddc7d6890",
"hash_input_tokens": "9b0387ed8198a7b5",
"hash_cont_tokens": "fa4a2c8384dfaaa5"
},
"truncated": 0,
"non_truncated": 198,
"padded": 768,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_government_and_politics|0": {
"hashes": {
"hash_examples": "7d7c6d476d0576b1",
"hash_full_prompts": "2edbb98d3256db30",
"hash_input_tokens": "b2549cdc000bbeae",
"hash_cont_tokens": "682709d2fa91c75e"
},
"truncated": 0,
"non_truncated": 193,
"padded": 768,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_macroeconomics|0": {
"hashes": {
"hash_examples": "694d3a01c6144ddb",
"hash_full_prompts": "c87be5bed46f74bb",
"hash_input_tokens": "458c0d25c43f710b",
"hash_cont_tokens": "4f2f97c723cb220f"
},
"truncated": 0,
"non_truncated": 2891,
"padded": 11440,
"non_padded": 124,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_mathematics|0": {
"hashes": {
"hash_examples": "004f9c0a40b5ec10",
"hash_full_prompts": "eb9958f57fb1a315",
"hash_input_tokens": "f73c9da82586e658",
"hash_cont_tokens": "8130a825e5a2ee3d"
},
"truncated": 0,
"non_truncated": 270,
"padded": 1072,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_microeconomics|0": {
"hashes": {
"hash_examples": "80cf03d462e6ccbc",
"hash_full_prompts": "ced3eb77bcc80c9d",
"hash_input_tokens": "eec9f88ca1e8de5d",
"hash_cont_tokens": "4f6974070ef28d29"
},
"truncated": 0,
"non_truncated": 238,
"padded": 948,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_physics|0": {
"hashes": {
"hash_examples": "92218def5b383845",
"hash_full_prompts": "7fa02eb03fed5d6d",
"hash_input_tokens": "169808d5c0840165",
"hash_cont_tokens": "5d32bcd7ba8252ba"
},
"truncated": 0,
"non_truncated": 151,
"padded": 604,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_psychology|0": {
"hashes": {
"hash_examples": "323f7848fee32e58",
"hash_full_prompts": "9f5b206cff8b7a65",
"hash_input_tokens": "5235bbbe166d28c6",
"hash_cont_tokens": "1512a6938229952b"
},
"truncated": 0,
"non_truncated": 545,
"padded": 2156,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_statistics|0": {
"hashes": {
"hash_examples": "d7bbe0d037cf31ec",
"hash_full_prompts": "5c1dd8fd44464945",
"hash_input_tokens": "7604208535e138d7",
"hash_cont_tokens": "95cb29e5c31221c8"
},
"truncated": 0,
"non_truncated": 4232,
"padded": 16776,
"non_padded": 152,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_us_history|0": {
"hashes": {
"hash_examples": "722ec9207e3b0e04",
"hash_full_prompts": "7cf272ea246b07e8",
"hash_input_tokens": "ebb5fe4a5ce3786b",
"hash_cont_tokens": "0c31c2de1e3429bf"
},
"truncated": 0,
"non_truncated": 204,
"padded": 816,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:high_school_world_history|0": {
"hashes": {
"hash_examples": "b5eb675d3b578584",
"hash_full_prompts": "38fcc21b2ee517d0",
"hash_input_tokens": "9afc0d58abefd264",
"hash_cont_tokens": "5e704d9d54138833"
},
"truncated": 0,
"non_truncated": 237,
"padded": 948,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_aging|0": {
"hashes": {
"hash_examples": "713ac79cd2dd2d7b",
"hash_full_prompts": "e4c482e811c5cae4",
"hash_input_tokens": "f85025f22480910a",
"hash_cont_tokens": "e5a3e63957647f04"
},
"truncated": 0,
"non_truncated": 223,
"padded": 868,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:human_sexuality|0": {
"hashes": {
"hash_examples": "47551ab4e5dcf6c5",
"hash_full_prompts": "ebfecc50e59782af",
"hash_input_tokens": "6bcc1e9f3c419603",
"hash_cont_tokens": "90a9b6d1231332f4"
},
"truncated": 0,
"non_truncated": 131,
"padded": 512,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:international_law|0": {
"hashes": {
"hash_examples": "da360336943398d5",
"hash_full_prompts": "0f0f5a324b7fc03f",
"hash_input_tokens": "12d87c935a9710ba",
"hash_cont_tokens": "9ab33ab519d55748"
},
"truncated": 0,
"non_truncated": 121,
"padded": 484,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:jurisprudence|0": {
"hashes": {
"hash_examples": "661d161a486fb035",
"hash_full_prompts": "45ffd4e537d1ecb0",
"hash_input_tokens": "91825a776ea8346c",
"hash_cont_tokens": "2cecb6db3790a23b"
},
"truncated": 0,
"non_truncated": 108,
"padded": 432,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:logical_fallacies|0": {
"hashes": {
"hash_examples": "5c3926384758bda7",
"hash_full_prompts": "08766a3dcdafe4a0",
"hash_input_tokens": "cc9da80d59b87d6a",
"hash_cont_tokens": "f5c60e363dd9fc3d"
},
"truncated": 0,
"non_truncated": 163,
"padded": 640,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:machine_learning|0": {
"hashes": {
"hash_examples": "3ce756e6a22ffc48",
"hash_full_prompts": "913cd017b1e491cc",
"hash_input_tokens": "4a08820961800d38",
"hash_cont_tokens": "d41e7e44237c0a16"
},
"truncated": 0,
"non_truncated": 112,
"padded": 444,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:management|0": {
"hashes": {
"hash_examples": "20fe769bb3276832",
"hash_full_prompts": "e576a331044c699a",
"hash_input_tokens": "fc2275bcab7861b5",
"hash_cont_tokens": "372864196dbb4cad"
},
"truncated": 0,
"non_truncated": 103,
"padded": 396,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:marketing|0": {
"hashes": {
"hash_examples": "6b19449559d987ce",
"hash_full_prompts": "4ea4e3c330485c1d",
"hash_input_tokens": "c3e3daba0ac3f472",
"hash_cont_tokens": "ad74b6b4e88f6100"
},
"truncated": 0,
"non_truncated": 234,
"padded": 932,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:medical_genetics|0": {
"hashes": {
"hash_examples": "cbb0fa9df0f5435a",
"hash_full_prompts": "21505302fe6bdcd1",
"hash_input_tokens": "c8a7ee5675a5bb7a",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 384,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:miscellaneous|0": {
"hashes": {
"hash_examples": "0a4134046c23cff9",
"hash_full_prompts": "2a7ee3dc1b2a4577",
"hash_input_tokens": "699dc756051bfadc",
"hash_cont_tokens": "f2831dc319b7001c"
},
"truncated": 0,
"non_truncated": 2420,
"padded": 9580,
"non_padded": 100,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_disputes|0": {
"hashes": {
"hash_examples": "1ac8a0967c82caa0",
"hash_full_prompts": "0023386ebe7b5251",
"hash_input_tokens": "e4cf05f28df96426",
"hash_cont_tokens": "d6a32c4f89ec0e43"
},
"truncated": 0,
"non_truncated": 346,
"padded": 1368,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:moral_scenarios|0": {
"hashes": {
"hash_examples": "2c0670188bc5a789",
"hash_full_prompts": "47e3f8545e3d3c32",
"hash_input_tokens": "9952daff4e011ef4",
"hash_cont_tokens": "0abad6841e9b5dc1"
},
"truncated": 0,
"non_truncated": 895,
"padded": 3504,
"non_padded": 76,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:nutrition|0": {
"hashes": {
"hash_examples": "658628c0dcdfe201",
"hash_full_prompts": "412f7c0ac82ba72e",
"hash_input_tokens": "7603e517cea1e5f0",
"hash_cont_tokens": "1947ff415070dfa5"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1208,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:philosophy|0": {
"hashes": {
"hash_examples": "8b6707b322affafd",
"hash_full_prompts": "b2722fcc1ba8b040",
"hash_input_tokens": "92612b2ecfc31d25",
"hash_cont_tokens": "566ed263a8423f58"
},
"truncated": 0,
"non_truncated": 311,
"padded": 1224,
"non_padded": 20,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:prehistory|0": {
"hashes": {
"hash_examples": "0c85ffcdc9a7b367",
"hash_full_prompts": "ef86135380b03be6",
"hash_input_tokens": "e3cd41d4395b7391",
"hash_cont_tokens": "69725bb3099f23d0"
},
"truncated": 0,
"non_truncated": 324,
"padded": 1272,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_accounting|0": {
"hashes": {
"hash_examples": "cce1ea2d5f544b2f",
"hash_full_prompts": "4d261f28b2e35e1f",
"hash_input_tokens": "bf073f8a6cf46c16",
"hash_cont_tokens": "25802ac32c51a7f7"
},
"truncated": 0,
"non_truncated": 4514,
"padded": 17819,
"non_padded": 237,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_law|0": {
"hashes": {
"hash_examples": "1c654b024b54eb4b",
"hash_full_prompts": "e2024afbd2985ece",
"hash_input_tokens": "95ac84d94dcedc32",
"hash_cont_tokens": "7f2b1b7218a1ef40"
},
"truncated": 0,
"non_truncated": 7987,
"padded": 31596,
"non_padded": 352,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_medicine|0": {
"hashes": {
"hash_examples": "c621eaacfa662ebc",
"hash_full_prompts": "0ce54ee1d51cd659",
"hash_input_tokens": "5ea759ee379e7f00",
"hash_cont_tokens": "e1afe1503a5d02c5"
},
"truncated": 0,
"non_truncated": 1637,
"padded": 6476,
"non_padded": 72,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:professional_psychology|0": {
"hashes": {
"hash_examples": "bc14a28eaec87dc4",
"hash_full_prompts": "a7bd0bdf13bf05ad",
"hash_input_tokens": "02836beea9da2008",
"hash_cont_tokens": "1c41f3eeadeec685"
},
"truncated": 0,
"non_truncated": 3503,
"padded": 13760,
"non_padded": 252,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:public_relations|0": {
"hashes": {
"hash_examples": "de4989d9375885c4",
"hash_full_prompts": "879d5e9e9856e7c5",
"hash_input_tokens": "334ff76fd24a18fc",
"hash_cont_tokens": "3914ab4a5d5b69e8"
},
"truncated": 0,
"non_truncated": 110,
"padded": 432,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:security_studies|0": {
"hashes": {
"hash_examples": "3f84bfeec717c6de",
"hash_full_prompts": "da38cc1f2c39b0a5",
"hash_input_tokens": "83c7a4db5babd791",
"hash_cont_tokens": "1d2b199df736dea4"
},
"truncated": 0,
"non_truncated": 245,
"padded": 972,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:sociology|0": {
"hashes": {
"hash_examples": "10d7c2fae10bfcbc",
"hash_full_prompts": "539888fca87366ec",
"hash_input_tokens": "a1fb5a6cf3ff2e23",
"hash_cont_tokens": "3556cb090eda6dec"
},
"truncated": 0,
"non_truncated": 201,
"padded": 788,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:us_foreign_policy|0": {
"hashes": {
"hash_examples": "bb05f02c38ddaf1a",
"hash_full_prompts": "49643b9cc74703b0",
"hash_input_tokens": "8d98609b2cfba0bb",
"hash_cont_tokens": "d35519013f781909"
},
"truncated": 0,
"non_truncated": 100,
"padded": 376,
"non_padded": 24,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:virology|0": {
"hashes": {
"hash_examples": "290915a48884ede2",
"hash_full_prompts": "28cb11c95e12eb0d",
"hash_input_tokens": "30eafbb13388b0ab",
"hash_cont_tokens": "cbf93f8f3bd5c82c"
},
"truncated": 0,
"non_truncated": 166,
"padded": 636,
"non_padded": 28,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|arabic_mmlu_ht:world_religions|0": {
"hashes": {
"hash_examples": "91cc5451c7284f75",
"hash_full_prompts": "d999068b8d3041e7",
"hash_input_tokens": "e57d2e1f5af5b238",
"hash_cont_tokens": "b5fbc024ac54a858"
},
"truncated": 0,
"non_truncated": 171,
"padded": 672,
"non_padded": 12,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (General)|0": {
"hashes": {
"hash_examples": "bef69fb8b3b75f28",
"hash_full_prompts": "a56fdc3a4fd4a26b",
"hash_input_tokens": "1c8c001b5a23f016",
"hash_cont_tokens": "05d3f2bc980e6cbb"
},
"truncated": 0,
"non_truncated": 612,
"padded": 2346,
"non_padded": 57,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|madinah_qa:Arabic Language (Grammar)|0": {
"hashes": {
"hash_examples": "bd066a9e6a140a4b",
"hash_full_prompts": "a645c5a3387f2ec9",
"hash_input_tokens": "8f43525a133bb164",
"hash_cont_tokens": "ac1327c8a93a78f2"
},
"truncated": 0,
"non_truncated": 365,
"padded": 1521,
"non_padded": 67,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Ethics|0": {
"hashes": {
"hash_examples": "5d32da36271c5eb4",
"hash_full_prompts": "cef59e1280d3cec3",
"hash_input_tokens": "8a370251b3fff368",
"hash_cont_tokens": "67fe5dc315ef723c"
},
"truncated": 0,
"non_truncated": 60,
"padded": 180,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Illegal|0": {
"hashes": {
"hash_examples": "0c07f1f100f2d0e8",
"hash_full_prompts": "e752cd923d178cad",
"hash_input_tokens": "a1d60f247c61f56a",
"hash_cont_tokens": "2cc82a58b4d87abc"
},
"truncated": 0,
"non_truncated": 53,
"padded": 159,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:MentalHealth|0": {
"hashes": {
"hash_examples": "8e5fc5c4704bd96b",
"hash_full_prompts": "aa24f48cf9143589",
"hash_input_tokens": "f15d52762441e7f2",
"hash_cont_tokens": "7b399d0f0a9124f1"
},
"truncated": 0,
"non_truncated": 76,
"padded": 228,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Offensive|0": {
"hashes": {
"hash_examples": "5ad4369b7dc5de46",
"hash_full_prompts": "5d11bea32fd09679",
"hash_input_tokens": "40d9b02fae2509c5",
"hash_cont_tokens": "0cd5015bc3370adf"
},
"truncated": 0,
"non_truncated": 69,
"padded": 207,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:PhysicalHealth|0": {
"hashes": {
"hash_examples": "dc2a632e2dcc86db",
"hash_full_prompts": "0029f010e68a62c6",
"hash_input_tokens": "fd4cc9f2a85a5e4a",
"hash_cont_tokens": "cb8655dcad91858d"
},
"truncated": 0,
"non_truncated": 73,
"padded": 210,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Privacy|0": {
"hashes": {
"hash_examples": "295e35448a39e003",
"hash_full_prompts": "a9a4901ba9341ff3",
"hash_input_tokens": "02ac4bfe0f739798",
"hash_cont_tokens": "7f23416c661e2ee5"
},
"truncated": 0,
"non_truncated": 57,
"padded": 162,
"non_padded": 9,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Trustfulness|0": {
"hashes": {
"hash_examples": "e79ac1ea5439e623",
"hash_full_prompts": "a0da4efc7f9ec52f",
"hash_input_tokens": "288cb0153aebf874",
"hash_cont_tokens": "ff874dba360c1ede"
},
"truncated": 0,
"non_truncated": 78,
"padded": 228,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|aratrust:Unfairness|0": {
"hashes": {
"hash_examples": "4ac5dccbfbdc5077",
"hash_full_prompts": "faf22f92faf61ba6",
"hash_input_tokens": "0e7a42b057c2b901",
"hash_cont_tokens": "3e990fe3a474dbc5"
},
"truncated": 0,
"non_truncated": 55,
"padded": 159,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"community|alrage_qa|0": {
"hashes": {
"hash_examples": "3edbbe22cabd4160",
"hash_full_prompts": "7fa5276938208dd2",
"hash_input_tokens": "338c70b76f6963cd",
"hash_cont_tokens": "00ca913d4ae03ccc"
},
"truncated": 2106,
"non_truncated": 0,
"padded": 2106,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b8b3b49631adcc40",
"hash_full_prompts": "714dd98476cbf2fd",
"hash_input_tokens": "bc76b0162558a688",
"hash_cont_tokens": "7e1ab9db9272aef5"
},
"truncated": 219,
"non_truncated": 91643,
"padded": 333440,
"non_padded": 3444,
"num_truncated_few_shots": 0
}
}