v2_results/Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo/results_2025-01-19T14-23-14.824329.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": 0,
    "start_time": 694.600731259,
    "end_time": 10314.111117647,
    "total_evaluation_time_secondes": "9619.510386388",
    "model_name": "Syed-Hasan-8503/Phi-3-mini-4K-instruct-cpo-simpo",
    "model_sha": "2896ef357be81fd433c17801d76ce148e60a7032",
    "model_dtype": "torch.bfloat16",
    "model_size": "7.12 GB"
  },
"results": { | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"acc_norm": 0.25493716337522443, | |
"acc_norm_stderr": 0.018483134744241226 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"acc_norm": 0.3015755329008341, | |
"acc_norm_stderr": 0.006248881707241888 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"acc_norm": 0.33631284916201115, | |
"acc_norm_stderr": 0.01580100372914589 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"acc_norm": 0.5466666666666666, | |
"acc_norm_stderr": 0.05787010410349153 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"acc_norm": 0.44666666666666666, | |
"acc_norm_stderr": 0.04072790343023464 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"acc_norm": 0.48, | |
"acc_norm_stderr": 0.04092881363092388 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"acc_norm": 0.8422764227642277, | |
"acc_norm_stderr": 0.004076557376328293 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"acc_norm": 0.5494578815679733, | |
"acc_norm_stderr": 0.0064265299043689186 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"acc_norm": 0.3581395348837209, | |
"acc_norm_stderr": 0.011564011303981893 | |
}, | |
"community|arabic_exams|0": { | |
"acc_norm": 0.24767225325884543, | |
"acc_norm_stderr": 0.01864488987174101 | |
}, | |
"community|arabic_mmlu:Accounting (University)|0": { | |
"acc_norm": 0.28378378378378377, | |
"acc_norm_stderr": 0.05276603149821338 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)|0": { | |
"acc_norm": 0.3709150326797386, | |
"acc_norm_stderr": 0.019542101564854114 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": { | |
"acc_norm": 0.2958904109589041, | |
"acc_norm_stderr": 0.023924060011244693 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)|0": { | |
"acc_norm": 0.32051282051282054, | |
"acc_norm_stderr": 0.02366129639396428 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": { | |
"acc_norm": 0.4444444444444444, | |
"acc_norm_stderr": 0.09745089103411436 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": { | |
"acc_norm": 0.42063492063492064, | |
"acc_norm_stderr": 0.031159606103086497 | |
}, | |
"community|arabic_mmlu:Biology (High School)|0": { | |
"acc_norm": 0.37402413058907025, | |
"acc_norm_stderr": 0.012895176168376564 | |
}, | |
"community|arabic_mmlu:Civics (High School)|0": { | |
"acc_norm": 0.3103448275862069, | |
"acc_norm_stderr": 0.049887188500387446 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)|0": { | |
"acc_norm": 0.2838983050847458, | |
"acc_norm_stderr": 0.029412695005168132 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)|0": { | |
"acc_norm": 0.3524904214559387, | |
"acc_norm_stderr": 0.029628528627097646 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": { | |
"acc_norm": 0.4074074074074074, | |
"acc_norm_stderr": 0.09636202008710973 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": { | |
"acc_norm": 0.5421052631578948, | |
"acc_norm_stderr": 0.03624046284425957 | |
}, | |
"community|arabic_mmlu:Computer Science (University)|0": { | |
"acc_norm": 0.375, | |
"acc_norm_stderr": 0.060993754559283325 | |
}, | |
"community|arabic_mmlu:Driving Test|0": { | |
"acc_norm": 0.47481420313790257, | |
"acc_norm_stderr": 0.014355742239142846 | |
}, | |
"community|arabic_mmlu:Economics (High School)|0": { | |
"acc_norm": 0.3611111111111111, | |
"acc_norm_stderr": 0.02535046144872979 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)|0": { | |
"acc_norm": 0.45977011494252873, | |
"acc_norm_stderr": 0.053741581963657706 | |
}, | |
"community|arabic_mmlu:Economics (University)|0": { | |
"acc_norm": 0.3284671532846715, | |
"acc_norm_stderr": 0.04027264457070886 | |
}, | |
"community|arabic_mmlu:General Knowledge|0": { | |
"acc_norm": 0.32060185185185186, | |
"acc_norm_stderr": 0.015886923153276587 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": { | |
"acc_norm": 0.37790697674418605, | |
"acc_norm_stderr": 0.037078492187232796 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": { | |
"acc_norm": 0.3395061728395062, | |
"acc_norm_stderr": 0.03732031330740126 | |
}, | |
"community|arabic_mmlu:Geography (High School)|0": { | |
"acc_norm": 0.27842003853564545, | |
"acc_norm_stderr": 0.013918841793523977 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)|0": { | |
"acc_norm": 0.34191176470588236, | |
"acc_norm_stderr": 0.028814722422254184 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)|0": { | |
"acc_norm": 0.43859649122807015, | |
"acc_norm_stderr": 0.0663095566682855 | |
}, | |
"community|arabic_mmlu:History (High School)|0": { | |
"acc_norm": 0.24342105263157895, | |
"acc_norm_stderr": 0.015577053313889633 | |
}, | |
"community|arabic_mmlu:History (Middle School)|0": { | |
"acc_norm": 0.3103448275862069, | |
"acc_norm_stderr": 0.03255086769970103 | |
}, | |
"community|arabic_mmlu:History (Primary School)|0": { | |
"acc_norm": 0.24509803921568626, | |
"acc_norm_stderr": 0.042801058373643966 | |
}, | |
"community|arabic_mmlu:Islamic Studies|0": { | |
"acc_norm": 0.24100156494522693, | |
"acc_norm_stderr": 0.016932458981082688 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": { | |
"acc_norm": 0.344311377245509, | |
"acc_norm_stderr": 0.026037686520768058 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { | |
"acc_norm": 0.36554621848739494, | |
"acc_norm_stderr": 0.031282177063684614 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { | |
"acc_norm": 0.48348348348348347, | |
"acc_norm_stderr": 0.01581858590399799 | |
}, | |
"community|arabic_mmlu:Law (Professional)|0": { | |
"acc_norm": 0.5286624203821656, | |
"acc_norm_stderr": 0.028215196869241975 | |
}, | |
"community|arabic_mmlu:Management (University)|0": { | |
"acc_norm": 0.56, | |
"acc_norm_stderr": 0.05770381035711357 | |
}, | |
"community|arabic_mmlu:Math (Primary School)|0": { | |
"acc_norm": 0.36185819070904646, | |
"acc_norm_stderr": 0.023790179661944647 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": { | |
"acc_norm": 0.359504132231405, | |
"acc_norm_stderr": 0.030910183844665075 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": { | |
"acc_norm": 0.44047619047619047, | |
"acc_norm_stderr": 0.027123648111288284 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)|0": { | |
"acc_norm": 0.3333333333333333, | |
"acc_norm_stderr": 0.07647191129018724 | |
}, | |
"community|arabic_mmlu:Physics (High School)|0": { | |
"acc_norm": 0.23529411764705882, | |
"acc_norm_stderr": 0.026615577394272578 | |
}, | |
"community|arabic_mmlu:Political Science (University)|0": { | |
"acc_norm": 0.30952380952380953, | |
"acc_norm_stderr": 0.03197777494209474 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)|0": { | |
"acc_norm": 0.37344398340248963, | |
"acc_norm_stderr": 0.031223894073220737 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)|0": { | |
"acc_norm": 0.4723404255319149, | |
"acc_norm_stderr": 0.018815602958392012 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra|0": { | |
"acc_norm": 0.33, | |
"acc_norm_stderr": 0.047258156262526045 | |
}, | |
"community|arabic_mmlu_ht:anatomy|0": { | |
"acc_norm": 0.3111111111111111, | |
"acc_norm_stderr": 0.03999262876617723 | |
}, | |
"community|arabic_mmlu_ht:astronomy|0": { | |
"acc_norm": 0.2894736842105263, | |
"acc_norm_stderr": 0.036906779861372814 | |
}, | |
"community|arabic_mmlu_ht:business_ethics|0": { | |
"acc_norm": 0.33, | |
"acc_norm_stderr": 0.04725815626252605 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": { | |
"acc_norm": 0.3169811320754717, | |
"acc_norm_stderr": 0.028637235639800918 | |
}, | |
"community|arabic_mmlu_ht:college_biology|0": { | |
"acc_norm": 0.2569444444444444, | |
"acc_norm_stderr": 0.03653946969442099 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry|0": { | |
"acc_norm": 0.3, | |
"acc_norm_stderr": 0.046056618647183814 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science|0": { | |
"acc_norm": 0.29, | |
"acc_norm_stderr": 0.04560480215720683 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics|0": { | |
"acc_norm": 0.33, | |
"acc_norm_stderr": 0.04725815626252604 | |
}, | |
"community|arabic_mmlu_ht:college_medicine|0": { | |
"acc_norm": 0.2254335260115607, | |
"acc_norm_stderr": 0.03186209851641144 | |
}, | |
"community|arabic_mmlu_ht:college_physics|0": { | |
"acc_norm": 0.16666666666666666, | |
"acc_norm_stderr": 0.03708284662416545 | |
}, | |
"community|arabic_mmlu_ht:computer_security|0": { | |
"acc_norm": 0.38, | |
"acc_norm_stderr": 0.04878317312145632 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics|0": { | |
"acc_norm": 0.2723404255319149, | |
"acc_norm_stderr": 0.029101290698386698 | |
}, | |
"community|arabic_mmlu_ht:econometrics|0": { | |
"acc_norm": 0.23684210526315788, | |
"acc_norm_stderr": 0.03999423879281336 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering|0": { | |
"acc_norm": 0.3793103448275862, | |
"acc_norm_stderr": 0.040434618619167466 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": { | |
"acc_norm": 0.2857142857142857, | |
"acc_norm_stderr": 0.02326651221373057 | |
}, | |
"community|arabic_mmlu_ht:formal_logic|0": { | |
"acc_norm": 0.3412698412698413, | |
"acc_norm_stderr": 0.04240799327574924 | |
}, | |
"community|arabic_mmlu_ht:global_facts|0": { | |
"acc_norm": 0.26, | |
"acc_norm_stderr": 0.04408440022768077 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology|0": { | |
"acc_norm": 0.3000262260687123, | |
"acc_norm_stderr": 0.0074223947478695125 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": { | |
"acc_norm": 0.2963147410358566, | |
"acc_norm_stderr": 0.007206478254809431 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": { | |
"acc_norm": 0.36, | |
"acc_norm_stderr": 0.048241815132442176 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history|0": { | |
"acc_norm": 0.28581943081452404, | |
"acc_norm_stderr": 0.0050043127490664125 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography|0": { | |
"acc_norm": 0.3181818181818182, | |
"acc_norm_stderr": 0.03318477333845331 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { | |
"acc_norm": 0.2538860103626943, | |
"acc_norm_stderr": 0.0314102478056532 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { | |
"acc_norm": 0.30058803182289867, | |
"acc_norm_stderr": 0.008529104800718584 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": { | |
"acc_norm": 0.3037037037037037, | |
"acc_norm_stderr": 0.028037929969115 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": { | |
"acc_norm": 0.31932773109243695, | |
"acc_norm_stderr": 0.030283995525884396 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics|0": { | |
"acc_norm": 0.2847682119205298, | |
"acc_norm_stderr": 0.03684881521389023 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology|0": { | |
"acc_norm": 0.28073394495412846, | |
"acc_norm_stderr": 0.019266055045871623 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics|0": { | |
"acc_norm": 0.29560491493383745, | |
"acc_norm_stderr": 0.0070152389953142295 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history|0": { | |
"acc_norm": 0.3627450980392157, | |
"acc_norm_stderr": 0.03374499356319355 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history|0": { | |
"acc_norm": 0.32489451476793246, | |
"acc_norm_stderr": 0.030486039389105303 | |
}, | |
"community|arabic_mmlu_ht:human_aging|0": { | |
"acc_norm": 0.2914798206278027, | |
"acc_norm_stderr": 0.03050028317654591 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality|0": { | |
"acc_norm": 0.2900763358778626, | |
"acc_norm_stderr": 0.03980066246467765 | |
}, | |
"community|arabic_mmlu_ht:international_law|0": { | |
"acc_norm": 0.4132231404958678, | |
"acc_norm_stderr": 0.04495087843548408 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence|0": { | |
"acc_norm": 0.3611111111111111, | |
"acc_norm_stderr": 0.04643454608906274 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies|0": { | |
"acc_norm": 0.3619631901840491, | |
"acc_norm_stderr": 0.037757007291414416 | |
}, | |
"community|arabic_mmlu_ht:machine_learning|0": { | |
"acc_norm": 0.33035714285714285, | |
"acc_norm_stderr": 0.04464285714285713 | |
}, | |
"community|arabic_mmlu_ht:management|0": { | |
"acc_norm": 0.34951456310679613, | |
"acc_norm_stderr": 0.04721188506097173 | |
}, | |
"community|arabic_mmlu_ht:marketing|0": { | |
"acc_norm": 0.45726495726495725, | |
"acc_norm_stderr": 0.03263622596380688 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics|0": { | |
"acc_norm": 0.44, | |
"acc_norm_stderr": 0.04988876515698589 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous|0": { | |
"acc_norm": 0.29958677685950413, | |
"acc_norm_stderr": 0.009313663745529553 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes|0": { | |
"acc_norm": 0.2745664739884393, | |
"acc_norm_stderr": 0.024027745155265012 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios|0": { | |
"acc_norm": 0.24134078212290502, | |
"acc_norm_stderr": 0.014310999547961459 | |
}, | |
"community|arabic_mmlu_ht:nutrition|0": { | |
"acc_norm": 0.35294117647058826, | |
"acc_norm_stderr": 0.02736359328468494 | |
}, | |
"community|arabic_mmlu_ht:philosophy|0": { | |
"acc_norm": 0.2829581993569132, | |
"acc_norm_stderr": 0.02558306248998484 | |
}, | |
"community|arabic_mmlu_ht:prehistory|0": { | |
"acc_norm": 0.25308641975308643, | |
"acc_norm_stderr": 0.024191808600713002 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting|0": { | |
"acc_norm": 0.29508196721311475, | |
"acc_norm_stderr": 0.006789033920445403 | |
}, | |
"community|arabic_mmlu_ht:professional_law|0": { | |
"acc_norm": 0.28358582696882434, | |
"acc_norm_stderr": 0.005043821313987741 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine|0": { | |
"acc_norm": 0.2944410507025046, | |
"acc_norm_stderr": 0.011268709472728186 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology|0": { | |
"acc_norm": 0.29888666856979734, | |
"acc_norm_stderr": 0.007735516447651808 | |
}, | |
"community|arabic_mmlu_ht:public_relations|0": { | |
"acc_norm": 0.39090909090909093, | |
"acc_norm_stderr": 0.04673752333670239 | |
}, | |
"community|arabic_mmlu_ht:security_studies|0": { | |
"acc_norm": 0.19183673469387755, | |
"acc_norm_stderr": 0.02520696315422538 | |
}, | |
"community|arabic_mmlu_ht:sociology|0": { | |
"acc_norm": 0.3034825870646766, | |
"acc_norm_stderr": 0.03251006816458619 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": { | |
"acc_norm": 0.39, | |
"acc_norm_stderr": 0.049020713000019756 | |
}, | |
"community|arabic_mmlu_ht:virology|0": { | |
"acc_norm": 0.27710843373493976, | |
"acc_norm_stderr": 0.034843315926805875 | |
}, | |
"community|arabic_mmlu_ht:world_religions|0": { | |
"acc_norm": 0.3567251461988304, | |
"acc_norm_stderr": 0.03674013002860954 | |
}, | |
"community|madinah_qa:Arabic Language (General)|0": { | |
"acc_norm": 0.3464052287581699, | |
"acc_norm_stderr": 0.01924978569171721 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)|0": { | |
"acc_norm": 0.38082191780821917, | |
"acc_norm_stderr": 0.02545177430315645 | |
}, | |
"community|aratrust:Ethics|0": { | |
"acc_norm": 0.5166666666666667, | |
"acc_norm_stderr": 0.06505828185300304 | |
}, | |
"community|aratrust:Illegal|0": { | |
"acc_norm": 0.5283018867924528, | |
"acc_norm_stderr": 0.06922635714201347 | |
}, | |
"community|aratrust:MentalHealth|0": { | |
"acc_norm": 0.6052631578947368, | |
"acc_norm_stderr": 0.056441080498755805 | |
}, | |
"community|aratrust:Offensive|0": { | |
"acc_norm": 0.8405797101449275, | |
"acc_norm_stderr": 0.04439221265797129 | |
}, | |
"community|aratrust:PhysicalHealth|0": { | |
"acc_norm": 0.5068493150684932, | |
"acc_norm_stderr": 0.058920036075634445 | |
}, | |
"community|aratrust:Privacy|0": { | |
"acc_norm": 0.5789473684210527, | |
"acc_norm_stderr": 0.06597717584505354 | |
}, | |
"community|aratrust:Trustfulness|0": { | |
"acc_norm": 0.3974358974358974, | |
"acc_norm_stderr": 0.05576860878238167 | |
}, | |
"community|aratrust:Unfairness|0": { | |
"acc_norm": 0.4909090909090909, | |
"acc_norm_stderr": 0.06803013430498077 | |
}, | |
"community|alghafa:_average|0": { | |
"acc_norm": 0.45733696866525825, | |
"acc_norm_stderr": 0.02245854888110646 | |
}, | |
"community|arabic_mmlu:_average|0": { | |
"acc_norm": 0.3677550203377433, | |
"acc_norm_stderr": 0.03577051898776405 | |
}, | |
"community|arabic_mmlu_ht:_average|0": { | |
"acc_norm": 0.3100036761571498, | |
"acc_norm_stderr": 0.03146879207976135 | |
}, | |
"community|madinah_qa:_average|0": { | |
"acc_norm": 0.36361357328319455, | |
"acc_norm_stderr": 0.02235077999743683 | |
}, | |
"community|aratrust:_average|0": { | |
"acc_norm": 0.5581191366666648, | |
"acc_norm_stderr": 0.06047673589497425 | |
}, | |
"all": { | |
"acc_norm": 0.3437737841493562, | |
"acc_norm_stderr": 0.032018488971133297, | |
"f1": 0.0, | |
"f1_stderr": 0.0 | |
}, | |
"community|alrage_qa|0": { | |
"llm_as_judge": 0.4228395061728394, | |
"llm_as_judge_stderr": 0.0002838556404206032 | |
} | |
}, | |
"versions": { | |
"community|alghafa:mcq_exams_test_ar|0": 0, | |
"community|alghafa:meta_ar_dialects|0": 0, | |
"community|alghafa:meta_ar_msa|0": 0, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, | |
"community|alghafa:multiple_choice_sentiment_task|0": 0, | |
"community|arabic_exams|0": 0, | |
"community|arabic_mmlu:Accounting (University)|0": 0, | |
"community|arabic_mmlu:Arabic Language (General)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0, | |
"community|arabic_mmlu:Arabic Language (High School)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0, | |
"community|arabic_mmlu:Biology (High School)|0": 0, | |
"community|arabic_mmlu:Civics (High School)|0": 0, | |
"community|arabic_mmlu:Civics (Middle School)|0": 0, | |
"community|arabic_mmlu:Computer Science (High School)|0": 0, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": 0, | |
"community|arabic_mmlu:Computer Science (University)|0": 0, | |
"community|arabic_mmlu:Driving Test|0": 0, | |
"community|arabic_mmlu:Economics (High School)|0": 0, | |
"community|arabic_mmlu:Economics (Middle School)|0": 0, | |
"community|arabic_mmlu:Economics (University)|0": 0, | |
"community|arabic_mmlu:General Knowledge|0": 0, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0, | |
"community|arabic_mmlu:Geography (High School)|0": 0, | |
"community|arabic_mmlu:Geography (Middle School)|0": 0, | |
"community|arabic_mmlu:Geography (Primary School)|0": 0, | |
"community|arabic_mmlu:History (High School)|0": 0, | |
"community|arabic_mmlu:History (Middle School)|0": 0, | |
"community|arabic_mmlu:History (Primary School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies|0": 0, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0, | |
"community|arabic_mmlu:Law (Professional)|0": 0, | |
"community|arabic_mmlu:Management (University)|0": 0, | |
"community|arabic_mmlu:Math (Primary School)|0": 0, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": 0, | |
"community|arabic_mmlu:Philosophy (High School)|0": 0, | |
"community|arabic_mmlu:Physics (High School)|0": 0, | |
"community|arabic_mmlu:Political Science (University)|0": 0, | |
"community|arabic_mmlu:Social Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Social Science (Primary School)|0": 0, | |
"community|arabic_mmlu_ht:abstract_algebra|0": 0, | |
"community|arabic_mmlu_ht:anatomy|0": 0, | |
"community|arabic_mmlu_ht:astronomy|0": 0, | |
"community|arabic_mmlu_ht:business_ethics|0": 0, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": 0, | |
"community|arabic_mmlu_ht:college_biology|0": 0, | |
"community|arabic_mmlu_ht:college_chemistry|0": 0, | |
"community|arabic_mmlu_ht:college_computer_science|0": 0, | |
"community|arabic_mmlu_ht:college_mathematics|0": 0, | |
"community|arabic_mmlu_ht:college_medicine|0": 0, | |
"community|arabic_mmlu_ht:college_physics|0": 0, | |
"community|arabic_mmlu_ht:computer_security|0": 0, | |
"community|arabic_mmlu_ht:conceptual_physics|0": 0, | |
"community|arabic_mmlu_ht:econometrics|0": 0, | |
"community|arabic_mmlu_ht:electrical_engineering|0": 0, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": 0, | |
"community|arabic_mmlu_ht:formal_logic|0": 0, | |
"community|arabic_mmlu_ht:global_facts|0": 0, | |
"community|arabic_mmlu_ht:high_school_biology|0": 0, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": 0, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": 0, | |
"community|arabic_mmlu_ht:high_school_european_history|0": 0, | |
"community|arabic_mmlu_ht:high_school_geography|0": 0, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": 0, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0, | |
"community|arabic_mmlu_ht:high_school_physics|0": 0, | |
"community|arabic_mmlu_ht:high_school_psychology|0": 0, | |
"community|arabic_mmlu_ht:high_school_statistics|0": 0, | |
"community|arabic_mmlu_ht:high_school_us_history|0": 0, | |
"community|arabic_mmlu_ht:high_school_world_history|0": 0, | |
"community|arabic_mmlu_ht:human_aging|0": 0, | |
"community|arabic_mmlu_ht:human_sexuality|0": 0, | |
"community|arabic_mmlu_ht:international_law|0": 0, | |
"community|arabic_mmlu_ht:jurisprudence|0": 0, | |
"community|arabic_mmlu_ht:logical_fallacies|0": 0, | |
"community|arabic_mmlu_ht:machine_learning|0": 0, | |
"community|arabic_mmlu_ht:management|0": 0, | |
"community|arabic_mmlu_ht:marketing|0": 0, | |
"community|arabic_mmlu_ht:medical_genetics|0": 0, | |
"community|arabic_mmlu_ht:miscellaneous|0": 0, | |
"community|arabic_mmlu_ht:moral_disputes|0": 0, | |
"community|arabic_mmlu_ht:moral_scenarios|0": 0, | |
"community|arabic_mmlu_ht:nutrition|0": 0, | |
"community|arabic_mmlu_ht:philosophy|0": 0, | |
"community|arabic_mmlu_ht:prehistory|0": 0, | |
"community|arabic_mmlu_ht:professional_accounting|0": 0, | |
"community|arabic_mmlu_ht:professional_law|0": 0, | |
"community|arabic_mmlu_ht:professional_medicine|0": 0, | |
"community|arabic_mmlu_ht:professional_psychology|0": 0, | |
"community|arabic_mmlu_ht:public_relations|0": 0, | |
"community|arabic_mmlu_ht:security_studies|0": 0, | |
"community|arabic_mmlu_ht:sociology|0": 0, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": 0, | |
"community|arabic_mmlu_ht:virology|0": 0, | |
"community|arabic_mmlu_ht:world_religions|0": 0, | |
"community|aratrust:Ethics|0": 0, | |
"community|aratrust:Illegal|0": 0, | |
"community|aratrust:MentalHealth|0": 0, | |
"community|aratrust:Offensive|0": 0, | |
"community|aratrust:PhysicalHealth|0": 0, | |
"community|aratrust:Privacy|0": 0, | |
"community|aratrust:Trustfulness|0": 0, | |
"community|aratrust:Unfairness|0": 0, | |
"community|madinah_qa:Arabic Language (General)|0": 0, | |
"community|madinah_qa:Arabic Language (Grammar)|0": 0, | |
"community|alrage_qa|0": 0 | |
}, | |
"config_tasks": { | |
"community|alghafa:mcq_exams_test_ar": { | |
"name": "alghafa:mcq_exams_test_ar", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "mcq_exams_test_ar", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 557, | |
"effective_num_docs": 557, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_dialects": { | |
"name": "alghafa:meta_ar_dialects", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_dialects", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5395, | |
"effective_num_docs": 5395, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_msa": { | |
"name": "alghafa:meta_ar_msa", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_msa", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7995, | |
"effective_num_docs": 7995, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5995, | |
"effective_num_docs": 5995, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task": { | |
"name": "alghafa:multiple_choice_sentiment_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_sentiment_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1720, | |
"effective_num_docs": 1720, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_exams": { | |
"name": "arabic_exams", | |
"prompt_function": "arabic_exams_pfn", | |
"hf_repo": "OALL/Arabic_EXAMS", | |
"hf_subset": "default", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 537, | |
"effective_num_docs": 537, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Accounting (University)": { | |
"name": "arabic_mmlu:Accounting (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Accounting (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 74, | |
"effective_num_docs": 74, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)": { | |
"name": "arabic_mmlu:Arabic Language (General)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (General)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)": { | |
"name": "arabic_mmlu:Arabic Language (Grammar)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Grammar)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 365, | |
"effective_num_docs": 365, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)": { | |
"name": "arabic_mmlu:Arabic Language (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 390, | |
"effective_num_docs": 390, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)": { | |
"name": "arabic_mmlu:Arabic Language (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 27, | |
"effective_num_docs": 27, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)": { | |
"name": "arabic_mmlu:Arabic Language (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 252, | |
"effective_num_docs": 252, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Biology (High School)": { | |
"name": "arabic_mmlu:Biology (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Biology (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1409, | |
"effective_num_docs": 1409, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Civics (High School)": { | |
"name": "arabic_mmlu:Civics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Civics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 87, | |
"effective_num_docs": 87, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)": { | |
"name": "arabic_mmlu:Civics (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Civics (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 236, | |
"effective_num_docs": 236, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)": { | |
"name": "arabic_mmlu:Computer Science (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 261, | |
"effective_num_docs": 261, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)": { | |
"name": "arabic_mmlu:Computer Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 27, | |
"effective_num_docs": 27, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)": { | |
"name": "arabic_mmlu:Computer Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 190, | |
"effective_num_docs": 190, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (University)": { | |
"name": "arabic_mmlu:Computer Science (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 64, | |
"effective_num_docs": 64, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Driving Test": { | |
"name": "arabic_mmlu:Driving Test", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Driving Test", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1211, | |
"effective_num_docs": 1211, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (High School)": { | |
"name": "arabic_mmlu:Economics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 360, | |
"effective_num_docs": 360, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)": { | |
"name": "arabic_mmlu:Economics (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 87, | |
"effective_num_docs": 87, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (University)": { | |
"name": "arabic_mmlu:Economics (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 137, | |
"effective_num_docs": 137, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge": { | |
"name": "arabic_mmlu:General Knowledge", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 864, | |
"effective_num_docs": 864, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)": { | |
"name": "arabic_mmlu:General Knowledge (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 172, | |
"effective_num_docs": 172, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)": { | |
"name": "arabic_mmlu:General Knowledge (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 162, | |
"effective_num_docs": 162, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (High School)": { | |
"name": "arabic_mmlu:Geography (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1038, | |
"effective_num_docs": 1038, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)": { | |
"name": "arabic_mmlu:Geography (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)": { | |
"name": "arabic_mmlu:Geography (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 57, | |
"effective_num_docs": 57, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (High School)": { | |
"name": "arabic_mmlu:History (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 760, | |
"effective_num_docs": 760, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (Middle School)": { | |
"name": "arabic_mmlu:History (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 203, | |
"effective_num_docs": 203, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (Primary School)": { | |
"name": "arabic_mmlu:History (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies": { | |
"name": "arabic_mmlu:Islamic Studies", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 639, | |
"effective_num_docs": 639, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)": { | |
"name": "arabic_mmlu:Islamic Studies (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 334, | |
"effective_num_docs": 334, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)": { | |
"name": "arabic_mmlu:Islamic Studies (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)": { | |
"name": "arabic_mmlu:Islamic Studies (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 999, | |
"effective_num_docs": 999, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Law (Professional)": { | |
"name": "arabic_mmlu:Law (Professional)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Law (Professional)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 314, | |
"effective_num_docs": 314, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Management (University)": { | |
"name": "arabic_mmlu:Management (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Management (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Math (Primary School)": { | |
"name": "arabic_mmlu:Math (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Math (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 409, | |
"effective_num_docs": 409, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)": { | |
"name": "arabic_mmlu:Natural Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Natural Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 242, | |
"effective_num_docs": 242, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)": { | |
"name": "arabic_mmlu:Natural Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Natural Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 336, | |
"effective_num_docs": 336, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)": { | |
"name": "arabic_mmlu:Philosophy (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Philosophy (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 39, | |
"effective_num_docs": 39, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Physics (High School)": { | |
"name": "arabic_mmlu:Physics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Physics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 255, | |
"effective_num_docs": 255, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Political Science (University)": { | |
"name": "arabic_mmlu:Political Science (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Political Science (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 210, | |
"effective_num_docs": 210, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)": { | |
"name": "arabic_mmlu:Social Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Social Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 241, | |
"effective_num_docs": 241, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)": { | |
"name": "arabic_mmlu:Social Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Social Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 705, | |
"effective_num_docs": 705, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra": { | |
"name": "arabic_mmlu_ht:abstract_algebra", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "abstract_algebra", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:anatomy": { | |
"name": "arabic_mmlu_ht:anatomy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "anatomy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 135, | |
"effective_num_docs": 135, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:astronomy": { | |
"name": "arabic_mmlu_ht:astronomy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "astronomy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 152, | |
"effective_num_docs": 152, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:business_ethics": { | |
"name": "arabic_mmlu_ht:business_ethics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "business_ethics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge": { | |
"name": "arabic_mmlu_ht:clinical_knowledge", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "clinical_knowledge", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_biology": { | |
"name": "arabic_mmlu_ht:college_biology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_biology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 144, | |
"effective_num_docs": 144, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry": { | |
"name": "arabic_mmlu_ht:college_chemistry", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_chemistry", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science": { | |
"name": "arabic_mmlu_ht:college_computer_science", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_computer_science", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics": { | |
"name": "arabic_mmlu_ht:college_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_medicine": { | |
"name": "arabic_mmlu_ht:college_medicine", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_medicine", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 173, | |
"effective_num_docs": 173, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_physics": { | |
"name": "arabic_mmlu_ht:college_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:computer_security": { | |
"name": "arabic_mmlu_ht:computer_security", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "computer_security", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics": { | |
"name": "arabic_mmlu_ht:conceptual_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "conceptual_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:econometrics": { | |
"name": "arabic_mmlu_ht:econometrics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "econometrics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 114, | |
"effective_num_docs": 114, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering": { | |
"name": "arabic_mmlu_ht:electrical_engineering", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "electrical_engineering", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics": { | |
"name": "arabic_mmlu_ht:elementary_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "elementary_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 378, | |
"effective_num_docs": 378, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:formal_logic": { | |
"name": "arabic_mmlu_ht:formal_logic", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "formal_logic", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 126, | |
"effective_num_docs": 126, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:global_facts": { | |
"name": "arabic_mmlu_ht:global_facts", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "global_facts", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology": { | |
"name": "arabic_mmlu_ht:high_school_biology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_biology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3813, | |
"effective_num_docs": 3813, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry": { | |
"name": "arabic_mmlu_ht:high_school_chemistry", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_chemistry", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4016, | |
"effective_num_docs": 4016, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science": { | |
"name": "arabic_mmlu_ht:high_school_computer_science", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_computer_science", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history": { | |
"name": "arabic_mmlu_ht:high_school_european_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_european_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 8152, | |
"effective_num_docs": 8152, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography": { | |
"name": "arabic_mmlu_ht:high_school_geography", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_geography", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 198, | |
"effective_num_docs": 198, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics": { | |
"name": "arabic_mmlu_ht:high_school_government_and_politics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_government_and_politics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 193, | |
"effective_num_docs": 193, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics": { | |
"name": "arabic_mmlu_ht:high_school_macroeconomics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_macroeconomics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2891, | |
"effective_num_docs": 2891, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics": { | |
"name": "arabic_mmlu_ht:high_school_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 270, | |
"effective_num_docs": 270, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics": { | |
"name": "arabic_mmlu_ht:high_school_microeconomics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_microeconomics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics": { | |
"name": "arabic_mmlu_ht:high_school_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 151, | |
"effective_num_docs": 151, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology": { | |
"name": "arabic_mmlu_ht:high_school_psychology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_psychology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 545, | |
"effective_num_docs": 545, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics": { | |
"name": "arabic_mmlu_ht:high_school_statistics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_statistics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4232, | |
"effective_num_docs": 4232, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history": { | |
"name": "arabic_mmlu_ht:high_school_us_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_us_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 204, | |
"effective_num_docs": 204, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history": { | |
"name": "arabic_mmlu_ht:high_school_world_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_world_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 237, | |
"effective_num_docs": 237, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:human_aging": { | |
"name": "arabic_mmlu_ht:human_aging", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "human_aging", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 223, | |
"effective_num_docs": 223, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality": { | |
"name": "arabic_mmlu_ht:human_sexuality", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "human_sexuality", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 131, | |
"effective_num_docs": 131, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:international_law": { | |
"name": "arabic_mmlu_ht:international_law", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "international_law", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 121, | |
"effective_num_docs": 121, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence": { | |
"name": "arabic_mmlu_ht:jurisprudence", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies": { | |
"name": "arabic_mmlu_ht:logical_fallacies", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:machine_learning": { | |
"name": "arabic_mmlu_ht:machine_learning", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:management": { | |
"name": "arabic_mmlu_ht:management", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "management", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:marketing": { | |
"name": "arabic_mmlu_ht:marketing", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "marketing", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics": { | |
"name": "arabic_mmlu_ht:medical_genetics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous": { | |
"name": "arabic_mmlu_ht:miscellaneous", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2420, | |
"effective_num_docs": 2420, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes": { | |
"name": "arabic_mmlu_ht:moral_disputes", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios": { | |
"name": "arabic_mmlu_ht:moral_scenarios", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:nutrition": { | |
"name": "arabic_mmlu_ht:nutrition", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "nutrition", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:philosophy": { | |
"name": "arabic_mmlu_ht:philosophy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "philosophy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:prehistory": { | |
"name": "arabic_mmlu_ht:prehistory", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "prehistory", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting": { | |
"name": "arabic_mmlu_ht:professional_accounting", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4514, | |
"effective_num_docs": 4514, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_law": { | |
"name": "arabic_mmlu_ht:professional_law", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_law", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7987, | |
"effective_num_docs": 7987, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine": { | |
"name": "arabic_mmlu_ht:professional_medicine", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1637, | |
"effective_num_docs": 1637, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology": { | |
"name": "arabic_mmlu_ht:professional_psychology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3503, | |
"effective_num_docs": 3503, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:public_relations": { | |
"name": "arabic_mmlu_ht:public_relations", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "public_relations", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:security_studies": { | |
"name": "arabic_mmlu_ht:security_studies", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "security_studies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:sociology": { | |
"name": "arabic_mmlu_ht:sociology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "sociology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy": { | |
"name": "arabic_mmlu_ht:us_foreign_policy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:virology": { | |
"name": "arabic_mmlu_ht:virology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "virology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:world_religions": { | |
"name": "arabic_mmlu_ht:world_religions", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "world_religions", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Ethics": { | |
"name": "aratrust:Ethics", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Ethics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 60, | |
"effective_num_docs": 60, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Illegal": { | |
"name": "aratrust:Illegal", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Illegal", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 53, | |
"effective_num_docs": 53, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:MentalHealth": { | |
"name": "aratrust:MentalHealth", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "MentalHealth", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 76, | |
"effective_num_docs": 76, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Offensive": { | |
"name": "aratrust:Offensive", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Offensive", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 69, | |
"effective_num_docs": 69, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:PhysicalHealth": { | |
"name": "aratrust:PhysicalHealth", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "PhysicalHealth", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 73, | |
"effective_num_docs": 73, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Privacy": { | |
"name": "aratrust:Privacy", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Privacy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 57, | |
"effective_num_docs": 57, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Trustfulness": { | |
"name": "aratrust:Trustfulness", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Trustfulness", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 78, | |
"effective_num_docs": 78, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Unfairness": { | |
"name": "aratrust:Unfairness", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Unfairness", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 55, | |
"effective_num_docs": 55, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|madinah_qa:Arabic Language (General)": { | |
"name": "madinah_qa:Arabic Language (General)", | |
"prompt_function": "madinah_qa_pfn", | |
"hf_repo": "MBZUAI/MadinahQA", | |
"hf_subset": "Arabic Language (General)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)": { | |
"name": "madinah_qa:Arabic Language (Grammar)", | |
"prompt_function": "madinah_qa_pfn", | |
"hf_repo": "MBZUAI/MadinahQA", | |
"hf_subset": "Arabic Language (Grammar)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 365, | |
"effective_num_docs": 365, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alrage_qa": { | |
"name": "alrage_qa", | |
"prompt_function": "qa_prompt_arabic", | |
"hf_repo": "OALL/ALRAGE", | |
"hf_subset": null, | |
"metric": [ | |
{ | |
"metric_name": "llm_as_judge", | |
"higher_is_better": true, | |
"category": "7", | |
"use_case": "10", | |
"sample_level_fn": "_sample_level_fn", | |
"corpus_level_fn": "aggregate_scores" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 200, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2106, | |
"effective_num_docs": 2106, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
} | |
}, | |
"summary_tasks": { | |
"community|alghafa:mcq_exams_test_ar|0": { | |
"hashes": { | |
"hash_examples": "c07a5e78c5c0b8fe", | |
"hash_full_prompts": "7c314443fa971313", | |
"hash_input_tokens": "f43ab5e750d34e08", | |
"hash_cont_tokens": "f17bbef7761d70b4" | |
}, | |
"truncated": 0, | |
"non_truncated": 557, | |
"padded": 2228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_dialects|0": { | |
"hashes": { | |
"hash_examples": "c0b6081f83e14064", | |
"hash_full_prompts": "f8d182f2cbf4f3a3", | |
"hash_input_tokens": "c5f259fd2bbb6bb6", | |
"hash_cont_tokens": "6a46f0b9d20fa63d" | |
}, | |
"truncated": 0, | |
"non_truncated": 5395, | |
"padded": 21571, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"hashes": { | |
"hash_examples": "64eb78a7c5b7484b", | |
"hash_full_prompts": "b6527d65d504e2f9", | |
"hash_input_tokens": "79ce9fb08908612b", | |
"hash_cont_tokens": "ccb9726e9233d849" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3569, | |
"non_padded": 11, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"hashes": { | |
"hash_examples": "54fc3502c1c02c06", | |
"hash_full_prompts": "6b1d6357748df96e", | |
"hash_input_tokens": "2fccae58d70649d8", | |
"hash_cont_tokens": "fc6ebea9242b4fd5" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 150, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"hashes": { | |
"hash_examples": "46572d83696552ae", | |
"hash_full_prompts": "7aace0df16f53277", | |
"hash_input_tokens": "78aeb894cdba19de", | |
"hash_cont_tokens": "e369a54044179ca9" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 750, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"hashes": { | |
"hash_examples": "f430d97ff715bc1c", | |
"hash_full_prompts": "6f52ada7014c1055", | |
"hash_input_tokens": "bd4a1094f457824b", | |
"hash_cont_tokens": "5cd1ef07693abb43" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 746, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"hashes": { | |
"hash_examples": "6b70a7416584f98c", | |
"hash_full_prompts": "b5bbac52d79ddf20", | |
"hash_input_tokens": "2ab294bfe28c7246", | |
"hash_cont_tokens": "8a9747fc4fc5f4cb" | |
}, | |
"truncated": 0, | |
"non_truncated": 7995, | |
"padded": 15990, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "bc2005cc9d2f436e", | |
"hash_full_prompts": "6e4debf699f71781", | |
"hash_input_tokens": "d0c5a498cb213c4c", | |
"hash_cont_tokens": "e56872af8309a868" | |
}, | |
"truncated": 0, | |
"non_truncated": 5995, | |
"padded": 17985, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "6fb0e254ea5945d8", | |
"hash_full_prompts": "6e6574d62f6cc8d7", | |
"hash_input_tokens": "a295f8c5594566c6", | |
"hash_cont_tokens": "956cd594be2295d5" | |
}, | |
"truncated": 0, | |
"non_truncated": 1720, | |
"padded": 5160, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_exams|0": { | |
"hashes": { | |
"hash_examples": "6d721df351722656", | |
"hash_full_prompts": "f4bf6df7bb14ccaa", | |
"hash_input_tokens": "abac0df295dc9a98", | |
"hash_cont_tokens": "e6ffbfe149a6c203" | |
}, | |
"truncated": 0, | |
"non_truncated": 537, | |
"padded": 2148, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Accounting (University)|0": { | |
"hashes": { | |
"hash_examples": "30e09697562ff9e7", | |
"hash_full_prompts": "2f6d58c5571ae31f", | |
"hash_input_tokens": "364f6f05c83f8a0f", | |
"hash_cont_tokens": "6f5e2632293b3885" | |
}, | |
"truncated": 0, | |
"non_truncated": 74, | |
"padded": 256, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)|0": { | |
"hashes": { | |
"hash_examples": "bef69fb8b3b75f28", | |
"hash_full_prompts": "16aa51c842a7b076", | |
"hash_input_tokens": "324538da9a670dc9", | |
"hash_cont_tokens": "163ed4686402ab60" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2403, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": { | |
"hashes": { | |
"hash_examples": "bd066a9e6a140a4b", | |
"hash_full_prompts": "0794fa55d5e25721", | |
"hash_input_tokens": "82ad98424dc0361f", | |
"hash_cont_tokens": "759cb83e307227a9" | |
}, | |
"truncated": 0, | |
"non_truncated": 365, | |
"padded": 1588, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)|0": { | |
"hashes": { | |
"hash_examples": "a9c2cd9a9929292a", | |
"hash_full_prompts": "88899929dae8993d", | |
"hash_input_tokens": "b2d2b371abbfcb14", | |
"hash_cont_tokens": "adcb755ebfabeed5" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1525, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "2f8a77bbbd0e21ff", | |
"hash_full_prompts": "3954736dff2a70d7", | |
"hash_input_tokens": "943cd09a1c49fee8", | |
"hash_cont_tokens": "5218f841f0f00a72" | |
}, | |
"truncated": 0, | |
"non_truncated": 27, | |
"padded": 105, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "5eed3da47822539b", | |
"hash_full_prompts": "fd0ec6331449b6ae", | |
"hash_input_tokens": "fb6b1f0e73277b9f", | |
"hash_cont_tokens": "b8b820f74f31eb8f" | |
}, | |
"truncated": 0, | |
"non_truncated": 252, | |
"padded": 926, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Biology (High School)|0": { | |
"hashes": { | |
"hash_examples": "91ae6d22a0f0213d", | |
"hash_full_prompts": "a3c88643cb2703da", | |
"hash_input_tokens": "986f9d6ef3d0e980", | |
"hash_cont_tokens": "27dab0131515f35c" | |
}, | |
"truncated": 0, | |
"non_truncated": 1409, | |
"padded": 5056, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Civics (High School)|0": { | |
"hashes": { | |
"hash_examples": "f27bf8791bea2bb9", | |
"hash_full_prompts": "06dc869eed60441a", | |
"hash_input_tokens": "63b6d1416813961a", | |
"hash_cont_tokens": "a40cce8429e071bf" | |
}, | |
"truncated": 0, | |
"non_truncated": 87, | |
"padded": 316, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "74f5bb0098c8916f", | |
"hash_full_prompts": "ae51faf29fdec13d", | |
"hash_input_tokens": "20d815a72d8885a0", | |
"hash_cont_tokens": "1da2261f1ce8ad1b" | |
}, | |
"truncated": 0, | |
"non_truncated": 236, | |
"padded": 944, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)|0": { | |
"hashes": { | |
"hash_examples": "a4278d7b525d46fe", | |
"hash_full_prompts": "3fe85113f033b903", | |
"hash_input_tokens": "8d903a9e30c0dfdc", | |
"hash_cont_tokens": "1b48294784e98dff" | |
}, | |
"truncated": 0, | |
"non_truncated": 261, | |
"padded": 1006, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "0cb6c07e4b80dfd4", | |
"hash_full_prompts": "fd42589c35f5b975", | |
"hash_input_tokens": "4bfe54723fdb39ab", | |
"hash_cont_tokens": "94e281bbf7253427" | |
}, | |
"truncated": 0, | |
"non_truncated": 27, | |
"padded": 100, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "d96fc1bc32473533", | |
"hash_full_prompts": "75ab86dc9862455b", | |
"hash_input_tokens": "05bbac160f3dcd7e", | |
"hash_cont_tokens": "5a81f7cc924777fb" | |
}, | |
"truncated": 0, | |
"non_truncated": 190, | |
"padded": 476, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (University)|0": { | |
"hashes": { | |
"hash_examples": "8835587e436cbaff", | |
"hash_full_prompts": "64323836b1557dc3", | |
"hash_input_tokens": "98e7cc88965d944b", | |
"hash_cont_tokens": "132ca54292c6c0b1" | |
}, | |
"truncated": 0, | |
"non_truncated": 64, | |
"padded": 255, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Driving Test|0": { | |
"hashes": { | |
"hash_examples": "7a4c38a2c451d075", | |
"hash_full_prompts": "644dc2f105afe621", | |
"hash_input_tokens": "eb3274ee40ac072d", | |
"hash_cont_tokens": "e31c784c4701f2c1" | |
}, | |
"truncated": 0, | |
"non_truncated": 1211, | |
"padded": 3685, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (High School)|0": { | |
"hashes": { | |
"hash_examples": "c04c252836601279", | |
"hash_full_prompts": "3dddf3296198fc1b", | |
"hash_input_tokens": "89ba91e866588ce8", | |
"hash_cont_tokens": "156b49afd06793b6" | |
}, | |
"truncated": 0, | |
"non_truncated": 360, | |
"padded": 1390, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "18fba1579406b3cc", | |
"hash_full_prompts": "64942bede3fb2a9f", | |
"hash_input_tokens": "96a0db18e890d505", | |
"hash_cont_tokens": "0c9f5434febed22c" | |
}, | |
"truncated": 0, | |
"non_truncated": 87, | |
"padded": 348, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (University)|0": { | |
"hashes": { | |
"hash_examples": "7c9e86fba8151562", | |
"hash_full_prompts": "1b24d9f872790c8c", | |
"hash_input_tokens": "c5579a0767ee4b23", | |
"hash_cont_tokens": "920a8a8639b46df4" | |
}, | |
"truncated": 0, | |
"non_truncated": 137, | |
"padded": 544, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge|0": { | |
"hashes": { | |
"hash_examples": "acfbe4e1f0314b85", | |
"hash_full_prompts": "036b955e31fb86ab", | |
"hash_input_tokens": "a57bbeba46b75585", | |
"hash_cont_tokens": "7c4ac833bf59e32c" | |
}, | |
"truncated": 0, | |
"non_truncated": 864, | |
"padded": 3213, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "03cd0ecf10224316", | |
"hash_full_prompts": "c8abdf4be6016573", | |
"hash_input_tokens": "f55a21275c4edbe3", | |
"hash_cont_tokens": "d237bea7c387ef23" | |
}, | |
"truncated": 0, | |
"non_truncated": 172, | |
"padded": 628, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "c3ee30196e05e122", | |
"hash_full_prompts": "2b05caf54b1e957f", | |
"hash_input_tokens": "feec74062c205114", | |
"hash_cont_tokens": "79af7db64d152414" | |
}, | |
"truncated": 0, | |
"non_truncated": 162, | |
"padded": 637, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (High School)|0": { | |
"hashes": { | |
"hash_examples": "e2e329d2bdd9fb7b", | |
"hash_full_prompts": "22b46963e390b2d8", | |
"hash_input_tokens": "434c0cccc0a2aac0", | |
"hash_cont_tokens": "bb2b7fe7573f8b51" | |
}, | |
"truncated": 0, | |
"non_truncated": 1038, | |
"padded": 4116, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "420b161444291989", | |
"hash_full_prompts": "734a5a9ea0cc7cad", | |
"hash_input_tokens": "94d11c7aa90c1376", | |
"hash_cont_tokens": "77b29447ace50192" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 975, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "5bc5ca48a4210899", | |
"hash_full_prompts": "5bf469b423fb1da2", | |
"hash_input_tokens": "37c918f93e11d237", | |
"hash_cont_tokens": "3fcd2312ae002e19" | |
}, | |
"truncated": 0, | |
"non_truncated": 57, | |
"padded": 216, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (High School)|0": { | |
"hashes": { | |
"hash_examples": "c7cc37f29311bea1", | |
"hash_full_prompts": "294472221c0507de", | |
"hash_input_tokens": "4b14c8ff57606abf", | |
"hash_cont_tokens": "d9263ab71389ae04" | |
}, | |
"truncated": 0, | |
"non_truncated": 760, | |
"padded": 2962, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "5b9f1973337153a2", | |
"hash_full_prompts": "24cfce764a82b7b2", | |
"hash_input_tokens": "de6bb218eb70e3f7", | |
"hash_cont_tokens": "1c2f868067672b81" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 746, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "af2469847007c1fe", | |
"hash_full_prompts": "c77af42efa30fb24", | |
"hash_input_tokens": "b4a034be65435283", | |
"hash_cont_tokens": "0f75aed3d4483b07" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies|0": { | |
"hashes": { | |
"hash_examples": "c8da9b2f16a5ea0f", | |
"hash_full_prompts": "9eec1996f1cbb945", | |
"hash_input_tokens": "e6046c736c4550d7", | |
"hash_cont_tokens": "f928d8748d8739be" | |
}, | |
"truncated": 0, | |
"non_truncated": 639, | |
"padded": 2529, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": { | |
"hashes": { | |
"hash_examples": "efb11bc8ef398117", | |
"hash_full_prompts": "b3eb1c7a668c56fa", | |
"hash_input_tokens": "446425ff0904146b", | |
"hash_cont_tokens": "12f57559c6c1e237" | |
}, | |
"truncated": 0, | |
"non_truncated": 334, | |
"padded": 1285, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "9e33ab030eebdb99", | |
"hash_full_prompts": "7f7ac80dbcc37b59", | |
"hash_input_tokens": "7cfc80819ab31709", | |
"hash_cont_tokens": "368694cb38312791" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 883, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "4167565d878b20eb", | |
"hash_full_prompts": "34d2e73be58c245d", | |
"hash_input_tokens": "0babd81bb2b2370b", | |
"hash_cont_tokens": "9e235ba050070efe" | |
}, | |
"truncated": 0, | |
"non_truncated": 999, | |
"padded": 3024, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Law (Professional)|0": { | |
"hashes": { | |
"hash_examples": "e77f52c8fe4352b3", | |
"hash_full_prompts": "9bd4c4d675f2c019", | |
"hash_input_tokens": "1edde50595a0b238", | |
"hash_cont_tokens": "0522e913bb39ff0a" | |
}, | |
"truncated": 0, | |
"non_truncated": 314, | |
"padded": 1232, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Management (University)|0": { | |
"hashes": { | |
"hash_examples": "09682649b04b7327", | |
"hash_full_prompts": "3cf9267e2a2a82d4", | |
"hash_input_tokens": "36c3a2f3aa34bf3a", | |
"hash_cont_tokens": "8f5172696771d8c3" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 200, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Math (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "edb027bfae7e76f1", | |
"hash_full_prompts": "0105f8ebb67a9caa", | |
"hash_input_tokens": "c41bc50b587c9bf2", | |
"hash_cont_tokens": "db406057d9720937" | |
}, | |
"truncated": 0, | |
"non_truncated": 409, | |
"padded": 1296, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "96e72c9094c2364c", | |
"hash_full_prompts": "1faafa362c2898ac", | |
"hash_input_tokens": "db6c078a63b0b886", | |
"hash_cont_tokens": "4edcdca4565ae581" | |
}, | |
"truncated": 0, | |
"non_truncated": 242, | |
"padded": 940, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "69e35bad3dec5a4d", | |
"hash_full_prompts": "c52e2bd4f054744e", | |
"hash_input_tokens": "5bdea43dfe7a19df", | |
"hash_cont_tokens": "274ed0c4b306c82b" | |
}, | |
"truncated": 0, | |
"non_truncated": 336, | |
"padded": 1228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)|0": { | |
"hashes": { | |
"hash_examples": "dc6ebd484a02fca5", | |
"hash_full_prompts": "3733099688f29a5a", | |
"hash_input_tokens": "0b4274a197f68a71", | |
"hash_cont_tokens": "435fa4ce69a608aa" | |
}, | |
"truncated": 0, | |
"non_truncated": 39, | |
"padded": 156, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Physics (High School)|0": { | |
"hashes": { | |
"hash_examples": "58a1722472c9e644", | |
"hash_full_prompts": "05def37176e1a20c", | |
"hash_input_tokens": "e89032f889175232", | |
"hash_cont_tokens": "03832753331db913" | |
}, | |
"truncated": 0, | |
"non_truncated": 255, | |
"padded": 1020, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Political Science (University)|0": { | |
"hashes": { | |
"hash_examples": "07a4ed6aabbdfd1e", | |
"hash_full_prompts": "503c2fa62dab8238", | |
"hash_input_tokens": "3d1b3392b93da21c", | |
"hash_cont_tokens": "c4e7bb380fa0981e" | |
}, | |
"truncated": 0, | |
"non_truncated": 210, | |
"padded": 710, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "8ca955902f304664", | |
"hash_full_prompts": "4b501740c138a870", | |
"hash_input_tokens": "3f84bd3cf25e3e36", | |
"hash_cont_tokens": "d729cec15f8a4f86" | |
}, | |
"truncated": 0, | |
"non_truncated": 241, | |
"padded": 929, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "934025ab3738123c", | |
"hash_full_prompts": "d474c3ff581e9281", | |
"hash_input_tokens": "7eb2299169877c79", | |
"hash_cont_tokens": "31dfd56d7e3044c6" | |
}, | |
"truncated": 0, | |
"non_truncated": 705, | |
"padded": 2043, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra|0": { | |
"hashes": { | |
"hash_examples": "0b557911f2f6d919", | |
"hash_full_prompts": "45d931a9437df018", | |
"hash_input_tokens": "424cf0e99816ea88", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:anatomy|0": { | |
"hashes": { | |
"hash_examples": "a552d8a0ef294061", | |
"hash_full_prompts": "7ed2e5df481665e9", | |
"hash_input_tokens": "8e7e92db95204c71", | |
"hash_cont_tokens": "2de6024aeae9961e" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 540, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:astronomy|0": { | |
"hashes": { | |
"hash_examples": "c4a372d0af7da098", | |
"hash_full_prompts": "293991eddaa67d56", | |
"hash_input_tokens": "fc16e8321950ff43", | |
"hash_cont_tokens": "25c305043822224b" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 608, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:business_ethics|0": { | |
"hashes": { | |
"hash_examples": "9f71d816abf8af7a", | |
"hash_full_prompts": "d99598d45bb26329", | |
"hash_input_tokens": "52651a1712d3da1b", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": { | |
"hashes": { | |
"hash_examples": "38303cd765589ef3", | |
"hash_full_prompts": "292f53b9bec18ccf", | |
"hash_input_tokens": "d01694aa92da2c37", | |
"hash_cont_tokens": "bc907759e6118a55" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1060, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_biology|0": { | |
"hashes": { | |
"hash_examples": "dbd9b5d318e60b04", | |
"hash_full_prompts": "24bc2a0f2e806b7e", | |
"hash_input_tokens": "91404bd622bf61bb", | |
"hash_cont_tokens": "ce38de044f15af7e" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 576, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry|0": { | |
"hashes": { | |
"hash_examples": "6f88491d03db8a4c", | |
"hash_full_prompts": "a0244bd4d5367105", | |
"hash_input_tokens": "ac50c708ff17d870", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science|0": { | |
"hashes": { | |
"hash_examples": "ebfdee5ef2ed5e17", | |
"hash_full_prompts": "47367af19bf4627e", | |
"hash_input_tokens": "f56d7f1df38db2c7", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics|0": { | |
"hashes": { | |
"hash_examples": "e3f22cd7712aae2f", | |
"hash_full_prompts": "08d6cc7b4ded7c8d", | |
"hash_input_tokens": "1f39f1de42a78f61", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_medicine|0": { | |
"hashes": { | |
"hash_examples": "51a5501373afb5a7", | |
"hash_full_prompts": "a38225bdd758a587", | |
"hash_input_tokens": "33a842e840a6f7ee", | |
"hash_cont_tokens": "644dda1c1c1390d6" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 684, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_physics|0": { | |
"hashes": { | |
"hash_examples": "2d3e015989b108db", | |
"hash_full_prompts": "feaf7c3e8183df19", | |
"hash_input_tokens": "05c35ee92b1479dd", | |
"hash_cont_tokens": "8c377165a442ecad" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 408, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:computer_security|0": { | |
"hashes": { | |
"hash_examples": "f8810eddc38dfee4", | |
"hash_full_prompts": "40c1b4c1ff4a7887", | |
"hash_input_tokens": "41bc83280bd14cc6", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics|0": { | |
"hashes": { | |
"hash_examples": "211e32cc43c6b1dc", | |
"hash_full_prompts": "d2a97625674dc736", | |
"hash_input_tokens": "bcfb74acac33751e", | |
"hash_cont_tokens": "e3452977dff76d65" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 940, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:econometrics|0": { | |
"hashes": { | |
"hash_examples": "810023786b2484d2", | |
"hash_full_prompts": "74208873b5e6bcb8", | |
"hash_input_tokens": "221c1075f9f8dc4f", | |
"hash_cont_tokens": "17aaa60288291684" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 456, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering|0": { | |
"hashes": { | |
"hash_examples": "a222760c93eaa1ee", | |
"hash_full_prompts": "8e757b37feb018e4", | |
"hash_input_tokens": "4c8a8dc96036f35b", | |
"hash_cont_tokens": "f842ac592ef88589" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 580, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": { | |
"hashes": { | |
"hash_examples": "4c069aeee64dc227", | |
"hash_full_prompts": "a70cd732fe45ccce", | |
"hash_input_tokens": "019b2970ed1d6166", | |
"hash_cont_tokens": "adcacd14363d9520" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1512, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:formal_logic|0": { | |
"hashes": { | |
"hash_examples": "3cb0ccbf8e8a77ae", | |
"hash_full_prompts": "0a6dd6f8ad70d891", | |
"hash_input_tokens": "a67ef7f0ec61aa57", | |
"hash_cont_tokens": "037a56d61a16d055" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 504, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:global_facts|0": { | |
"hashes": { | |
"hash_examples": "c1d039e64ea321b9", | |
"hash_full_prompts": "d98f6c1521163958", | |
"hash_input_tokens": "ccf4713c4b7b0ae9", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology|0": { | |
"hashes": { | |
"hash_examples": "ddcb8237bb4ba08a", | |
"hash_full_prompts": "da368db27f7b000f", | |
"hash_input_tokens": "cd9a85512d348256", | |
"hash_cont_tokens": "d00c14ab4f65ffde" | |
}, | |
"truncated": 0, | |
"non_truncated": 3813, | |
"padded": 15216, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": { | |
"hashes": { | |
"hash_examples": "07061b55c5c436d9", | |
"hash_full_prompts": "0fa92fcd4d169e31", | |
"hash_input_tokens": "e29e8516533eda6e", | |
"hash_cont_tokens": "b03c3985056914e8" | |
}, | |
"truncated": 0, | |
"non_truncated": 4016, | |
"padded": 16012, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": { | |
"hashes": { | |
"hash_examples": "8d3405483d5fdcff", | |
"hash_full_prompts": "416b1f3ff952a502", | |
"hash_input_tokens": "ffb0c9ed433a4efe", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history|0": { | |
"hashes": { | |
"hash_examples": "031c49a430356414", | |
"hash_full_prompts": "8d785ba575657875", | |
"hash_input_tokens": "b435d9d2ff073b9a", | |
"hash_cont_tokens": "0e7a76cd8060c36c" | |
}, | |
"truncated": 0, | |
"non_truncated": 8152, | |
"padded": 32552, | |
"non_padded": 56, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography|0": { | |
"hashes": { | |
"hash_examples": "d0ce2b019a66c1de", | |
"hash_full_prompts": "97905ad263f3c187", | |
"hash_input_tokens": "c8cf98051051bf08", | |
"hash_cont_tokens": "36713106fb5a42a6" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 792, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { | |
"hashes": { | |
"hash_examples": "7d7c6d476d0576b1", | |
"hash_full_prompts": "2d9793a9875ccd49", | |
"hash_input_tokens": "61edee4d29e41759", | |
"hash_cont_tokens": "018ca11705102481" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 764, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { | |
"hashes": { | |
"hash_examples": "694d3a01c6144ddb", | |
"hash_full_prompts": "05d016c0334fdb76", | |
"hash_input_tokens": "e65da922ddd2e066", | |
"hash_cont_tokens": "5903899225519095" | |
}, | |
"truncated": 0, | |
"non_truncated": 2891, | |
"padded": 11536, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": { | |
"hashes": { | |
"hash_examples": "004f9c0a40b5ec10", | |
"hash_full_prompts": "176047248bd75559", | |
"hash_input_tokens": "38623aa588a7681a", | |
"hash_cont_tokens": "bb41bae3a8194bb4" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1080, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": { | |
"hashes": { | |
"hash_examples": "80cf03d462e6ccbc", | |
"hash_full_prompts": "43531f7abbb17834", | |
"hash_input_tokens": "7981aca65d300d36", | |
"hash_cont_tokens": "b334be51bbea1ab9" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 952, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics|0": { | |
"hashes": { | |
"hash_examples": "92218def5b383845", | |
"hash_full_prompts": "09326030be9e7122", | |
"hash_input_tokens": "bfeaa2969b63a076", | |
"hash_cont_tokens": "083bf16a6a90bba6" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 600, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology|0": { | |
"hashes": { | |
"hash_examples": "323f7848fee32e58", | |
"hash_full_prompts": "ca2ab17e0eab8336", | |
"hash_input_tokens": "aada063e11bcf834", | |
"hash_cont_tokens": "636067a6dba8c739" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2156, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics|0": { | |
"hashes": { | |
"hash_examples": "d7bbe0d037cf31ec", | |
"hash_full_prompts": "1cea50a255539e55", | |
"hash_input_tokens": "f6c5e19c4ea34a9b", | |
"hash_cont_tokens": "5222f2060756622f" | |
}, | |
"truncated": 0, | |
"non_truncated": 4232, | |
"padded": 16872, | |
"non_padded": 56, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history|0": { | |
"hashes": { | |
"hash_examples": "722ec9207e3b0e04", | |
"hash_full_prompts": "9d2670d3d5aabe6f", | |
"hash_input_tokens": "1d6f67ab89e20e17", | |
"hash_cont_tokens": "4a315e951c0e1d42" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 816, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history|0": { | |
"hashes": { | |
"hash_examples": "b5eb675d3b578584", | |
"hash_full_prompts": "df1fc5f9d5a03531", | |
"hash_input_tokens": "16c84006a391a94a", | |
"hash_cont_tokens": "5cd0ec21c109f84d" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 948, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:human_aging|0": { | |
"hashes": { | |
"hash_examples": "713ac79cd2dd2d7b", | |
"hash_full_prompts": "a34e3725ba274497", | |
"hash_input_tokens": "9879321ef599f9e5", | |
"hash_cont_tokens": "dde16bfcb64c965c" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 888, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality|0": { | |
"hashes": { | |
"hash_examples": "47551ab4e5dcf6c5", | |
"hash_full_prompts": "aee4501a98f74c02", | |
"hash_input_tokens": "5447cfebb11ee92c", | |
"hash_cont_tokens": "0cb40262c47c9b00" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 516, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:international_law|0": { | |
"hashes": { | |
"hash_examples": "da360336943398d5", | |
"hash_full_prompts": "4a0e7fb8a7d02d5d", | |
"hash_input_tokens": "25ce686f81bc45cd", | |
"hash_cont_tokens": "f15613809463247b" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 484, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence|0": { | |
"hashes": { | |
"hash_examples": "661d161a486fb035", | |
"hash_full_prompts": "3e1d89854cadfa32", | |
"hash_input_tokens": "7d00e578b6fb44b0", | |
"hash_cont_tokens": "3ac3b35a85d62d5b" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 432, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies|0": { | |
"hashes": { | |
"hash_examples": "5c3926384758bda7", | |
"hash_full_prompts": "9ec1be5a57dd33d6", | |
"hash_input_tokens": "3872d2021c8ac508", | |
"hash_cont_tokens": "1a312fd21529b45f" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 640, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:machine_learning|0": { | |
"hashes": { | |
"hash_examples": "3ce756e6a22ffc48", | |
"hash_full_prompts": "c488422d986fcf7c", | |
"hash_input_tokens": "183efca34c48d478", | |
"hash_cont_tokens": "9ab3910d6a41713c" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 432, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:management|0": { | |
"hashes": { | |
"hash_examples": "20fe769bb3276832", | |
"hash_full_prompts": "d0c4d61d4b29f10f", | |
"hash_input_tokens": "156c7903111bf226", | |
"hash_cont_tokens": "c1814986ca86d272" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 405, | |
"non_padded": 7, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:marketing|0": { | |
"hashes": { | |
"hash_examples": "6b19449559d987ce", | |
"hash_full_prompts": "c4c006e155da3ab9", | |
"hash_input_tokens": "d00dcc652b39ad79", | |
"hash_cont_tokens": "8b5eec7438345078" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 920, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics|0": { | |
"hashes": { | |
"hash_examples": "cbb0fa9df0f5435a", | |
"hash_full_prompts": "211eef4492aedde5", | |
"hash_input_tokens": "2ca6cca73da883b0", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous|0": { | |
"hashes": { | |
"hash_examples": "0a4134046c23cff9", | |
"hash_full_prompts": "0bb34c68293f5fbb", | |
"hash_input_tokens": "40a88adbe491c91c", | |
"hash_cont_tokens": "e9776f0729d4ad47" | |
}, | |
"truncated": 0, | |
"non_truncated": 2420, | |
"padded": 9580, | |
"non_padded": 100, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes|0": { | |
"hashes": { | |
"hash_examples": "1ac8a0967c82caa0", | |
"hash_full_prompts": "c58ac31368702119", | |
"hash_input_tokens": "4b7effc77bfa34b5", | |
"hash_cont_tokens": "84178e69540069fa" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1380, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios|0": { | |
"hashes": { | |
"hash_examples": "2c0670188bc5a789", | |
"hash_full_prompts": "5416a570176e7ee9", | |
"hash_input_tokens": "f0aa1c2de29efb96", | |
"hash_cont_tokens": "68dd815f84a4c553" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3540, | |
"non_padded": 40, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:nutrition|0": { | |
"hashes": { | |
"hash_examples": "658628c0dcdfe201", | |
"hash_full_prompts": "33aca616fd4c585d", | |
"hash_input_tokens": "5ff46229ad8ecee4", | |
"hash_cont_tokens": "c5203e0db99c8254" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1204, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:philosophy|0": { | |
"hashes": { | |
"hash_examples": "8b6707b322affafd", | |
"hash_full_prompts": "d83cd2a284923854", | |
"hash_input_tokens": "e71180b724e4f0f0", | |
"hash_cont_tokens": "02aa1b69ad245f52" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1220, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:prehistory|0": { | |
"hashes": { | |
"hash_examples": "0c85ffcdc9a7b367", | |
"hash_full_prompts": "ecd66be22a6e28e1", | |
"hash_input_tokens": "f538b16a5f96884a", | |
"hash_cont_tokens": "eede8e9c60c3f633" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1276, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting|0": { | |
"hashes": { | |
"hash_examples": "cce1ea2d5f544b2f", | |
"hash_full_prompts": "3d80c6d9f0dc3cee", | |
"hash_input_tokens": "c4167eb8512fdf51", | |
"hash_cont_tokens": "a9813bcaf88fe200" | |
}, | |
"truncated": 0, | |
"non_truncated": 4514, | |
"padded": 17904, | |
"non_padded": 152, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_law|0": { | |
"hashes": { | |
"hash_examples": "1c654b024b54eb4b", | |
"hash_full_prompts": "e17a7da400ec960b", | |
"hash_input_tokens": "505f7ae817aee81b", | |
"hash_cont_tokens": "2222cfca282be802" | |
}, | |
"truncated": 0, | |
"non_truncated": 7987, | |
"padded": 31796, | |
"non_padded": 152, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine|0": { | |
"hashes": { | |
"hash_examples": "c621eaacfa662ebc", | |
"hash_full_prompts": "c8265bc0f127a9ea", | |
"hash_input_tokens": "80cdebdfa1778240", | |
"hash_cont_tokens": "b8eb0108a598c708" | |
}, | |
"truncated": 0, | |
"non_truncated": 1637, | |
"padded": 6496, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology|0": { | |
"hashes": { | |
"hash_examples": "bc14a28eaec87dc4", | |
"hash_full_prompts": "94f2f4037840f942", | |
"hash_input_tokens": "9ff3dd4ecf23d4fd", | |
"hash_cont_tokens": "c8f5fcc8b24ec539" | |
}, | |
"truncated": 0, | |
"non_truncated": 3503, | |
"padded": 13872, | |
"non_padded": 140, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:public_relations|0": { | |
"hashes": { | |
"hash_examples": "de4989d9375885c4", | |
"hash_full_prompts": "e71dde2b261c58b7", | |
"hash_input_tokens": "3717004ba2183108", | |
"hash_cont_tokens": "850e6fbe5c4cbeae" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 436, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:security_studies|0": { | |
"hashes": { | |
"hash_examples": "3f84bfeec717c6de", | |
"hash_full_prompts": "400097918c976960", | |
"hash_input_tokens": "bf108e371f6af68d", | |
"hash_cont_tokens": "3de9251f35e9c690" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 980, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:sociology|0": { | |
"hashes": { | |
"hash_examples": "10d7c2fae10bfcbc", | |
"hash_full_prompts": "f51c566e69079630", | |
"hash_input_tokens": "5e7ebb38fc1252a3", | |
"hash_cont_tokens": "68f24f2c79eea166" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 804, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": { | |
"hashes": { | |
"hash_examples": "bb05f02c38ddaf1a", | |
"hash_full_prompts": "4420bd5e131a3335", | |
"hash_input_tokens": "a568a478dcd9d2b8", | |
"hash_cont_tokens": "ba6a2574cbd07ca3" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:virology|0": { | |
"hashes": { | |
"hash_examples": "290915a48884ede2", | |
"hash_full_prompts": "32371a0f8317d24b", | |
"hash_input_tokens": "2d4247241459c550", | |
"hash_cont_tokens": "537836653abda55a" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 664, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:world_religions|0": { | |
"hashes": { | |
"hash_examples": "91cc5451c7284f75", | |
"hash_full_prompts": "0a65f2ae09b79645", | |
"hash_input_tokens": "6b5f782145fb0985", | |
"hash_cont_tokens": "78542539eccacf8d" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 676, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|madinah_qa:Arabic Language (General)|0": { | |
"hashes": { | |
"hash_examples": "bef69fb8b3b75f28", | |
"hash_full_prompts": "16aa51c842a7b076", | |
"hash_input_tokens": "d070e33d7bfdd4f5", | |
"hash_cont_tokens": "6248da77451ebfea" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2372, | |
"non_padded": 31, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)|0": { | |
"hashes": { | |
"hash_examples": "bd066a9e6a140a4b", | |
"hash_full_prompts": "0794fa55d5e25721", | |
"hash_input_tokens": "06c4de66c40c121f", | |
"hash_cont_tokens": "a6c3c5915f0ed43e" | |
}, | |
"truncated": 0, | |
"non_truncated": 365, | |
"padded": 1557, | |
"non_padded": 31, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Ethics|0": { | |
"hashes": { | |
"hash_examples": "5d32da36271c5eb4", | |
"hash_full_prompts": "8f190797b00b73cd", | |
"hash_input_tokens": "7b1afaceb873fdfe", | |
"hash_cont_tokens": "e58404d243228d63" | |
}, | |
"truncated": 0, | |
"non_truncated": 60, | |
"padded": 178, | |
"non_padded": 2, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Illegal|0": { | |
"hashes": { | |
"hash_examples": "0c07f1f100f2d0e8", | |
"hash_full_prompts": "43c41b536d4e9156", | |
"hash_input_tokens": "42265216326b89b8", | |
"hash_cont_tokens": "f329095cea89e8ef" | |
}, | |
"truncated": 0, | |
"non_truncated": 53, | |
"padded": 159, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:MentalHealth|0": { | |
"hashes": { | |
"hash_examples": "8e5fc5c4704bd96b", | |
"hash_full_prompts": "5c847f68ff5a829f", | |
"hash_input_tokens": "3e4491a2d91f217c", | |
"hash_cont_tokens": "501cb92a1f1f3bc4" | |
}, | |
"truncated": 0, | |
"non_truncated": 76, | |
"padded": 221, | |
"non_padded": 7, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Offensive|0": { | |
"hashes": { | |
"hash_examples": "5ad4369b7dc5de46", | |
"hash_full_prompts": "7b328bbab91bd614", | |
"hash_input_tokens": "8d4a37c2233651ee", | |
"hash_cont_tokens": "51bda8813f456fd1" | |
}, | |
"truncated": 0, | |
"non_truncated": 69, | |
"padded": 207, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:PhysicalHealth|0": { | |
"hashes": { | |
"hash_examples": "dc2a632e2dcc86db", | |
"hash_full_prompts": "75cbbaab48439198", | |
"hash_input_tokens": "3b03d0b25443858c", | |
"hash_cont_tokens": "d9d15263ee32a322" | |
}, | |
"truncated": 0, | |
"non_truncated": 73, | |
"padded": 219, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Privacy|0": { | |
"hashes": { | |
"hash_examples": "295e35448a39e003", | |
"hash_full_prompts": "abfb4817fa106a5f", | |
"hash_input_tokens": "7141f56e68c19434", | |
"hash_cont_tokens": "9fda45e0b1043810" | |
}, | |
"truncated": 0, | |
"non_truncated": 57, | |
"padded": 168, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Trustfulness|0": { | |
"hashes": { | |
"hash_examples": "e79ac1ea5439e623", | |
"hash_full_prompts": "7383e5b4bb594029", | |
"hash_input_tokens": "ec4ee04333ce59e7", | |
"hash_cont_tokens": "53b4c9d62a057d7e" | |
}, | |
"truncated": 0, | |
"non_truncated": 78, | |
"padded": 231, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Unfairness|0": { | |
"hashes": { | |
"hash_examples": "4ac5dccbfbdc5077", | |
"hash_full_prompts": "a500cd2de237972c", | |
"hash_input_tokens": "4d7d69dbcdb4d384", | |
"hash_cont_tokens": "bc7e6f3987309f01" | |
}, | |
"truncated": 0, | |
"non_truncated": 55, | |
"padded": 153, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alrage_qa|0": { | |
"hashes": { | |
"hash_examples": "3edbbe22cabd4160", | |
"hash_full_prompts": "e6925371a40678f3", | |
"hash_input_tokens": "8c43f10738adb3e4", | |
"hash_cont_tokens": "b3c03310522e063f" | |
}, | |
"truncated": 1217, | |
"non_truncated": 889, | |
"padded": 1683, | |
"non_padded": 423, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "b8b3b49631adcc40", | |
"hash_full_prompts": "dd0c3e2434d4d8f9", | |
"hash_input_tokens": "01ef3a5d016fbb9c", | |
"hash_cont_tokens": "454e216bb7ad4d9a" | |
}, | |
"truncated": 1, | |
"non_truncated": 91861, | |
"padded": 335738, | |
"non_padded": 1146, | |
"num_truncated_few_shots": 0 | |
} | |
} |