{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": 0,
        "start_time": 786.955905106,
        "end_time": 12920.374618762,
        "total_evaluation_time_secondes": "12133.418713656",
        "model_name": "inceptionai/jais-family-13b",
        "model_sha": "9e5bd6b43f9923cc6217df77ca83a8cdc99f861f",
        "model_dtype": "torch.float16",
        "model_size": "24.29 GB"
    },
|
"results": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.28186714542190305, |
|
"acc_norm_stderr": 0.019080389276755805 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.2726598702502317, |
|
"acc_norm_stderr": 0.006063507451856253 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.29720670391061454, |
|
"acc_norm_stderr": 0.015285313353641602 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.7066666666666667, |
|
"acc_norm_stderr": 0.052926375288708395 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.44, |
|
"acc_norm_stderr": 0.04066560309607849 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.3466666666666667, |
|
"acc_norm_stderr": 0.03898794245625698 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.7400875547217011, |
|
"acc_norm_stderr": 0.004905384067849132 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.4552126772310259, |
|
"acc_norm_stderr": 0.006432240901081121 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.3377906976744186, |
|
"acc_norm_stderr": 0.011407317855772224 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.24022346368715083, |
|
"acc_norm_stderr": 0.018453054149050775 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"acc_norm": 0.2702702702702703, |
|
"acc_norm_stderr": 0.05197789984508372 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"acc_norm": 0.2042483660130719, |
|
"acc_norm_stderr": 0.016309755848361526 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.22465753424657534, |
|
"acc_norm_stderr": 0.02187542944965106 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"acc_norm": 0.3230769230769231, |
|
"acc_norm_stderr": 0.023710888501970562 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"acc_norm": 0.2962962962962963, |
|
"acc_norm_stderr": 0.0895511888632576 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"acc_norm": 0.43253968253968256, |
|
"acc_norm_stderr": 0.031271150966052506 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"acc_norm": 0.28246983676366216, |
|
"acc_norm_stderr": 0.011997887718994297 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"acc_norm": 0.39080459770114945, |
|
"acc_norm_stderr": 0.052614920082724996 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.028246634143301606 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"acc_norm": 0.2988505747126437, |
|
"acc_norm_stderr": 0.02838871042581256 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.09745089103411436 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"acc_norm": 0.47368421052631576, |
|
"acc_norm_stderr": 0.03631923996538703 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"acc_norm": 0.3125, |
|
"acc_norm_stderr": 0.058397074018894594 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"acc_norm": 0.4434351775392238, |
|
"acc_norm_stderr": 0.014281711730983996 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"acc_norm": 0.2611111111111111, |
|
"acc_norm_stderr": 0.02318221984399588 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"acc_norm": 0.3218390804597701, |
|
"acc_norm_stderr": 0.05037749206122548 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"acc_norm": 0.32116788321167883, |
|
"acc_norm_stderr": 0.040038499303887747 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"acc_norm": 0.34837962962962965, |
|
"acc_norm_stderr": 0.01621878455756233 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"acc_norm": 0.27325581395348836, |
|
"acc_norm_stderr": 0.03407826167337436 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"acc_norm": 0.2716049382716049, |
|
"acc_norm_stderr": 0.035054145852934086 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"acc_norm": 0.33815028901734107, |
|
"acc_norm_stderr": 0.01469079150089222 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"acc_norm": 0.2867647058823529, |
|
"acc_norm_stderr": 0.02747227447323382 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"acc_norm": 0.3684210526315789, |
|
"acc_norm_stderr": 0.06446025638903097 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"acc_norm": 0.3263157894736842, |
|
"acc_norm_stderr": 0.01701869835100162 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"acc_norm": 0.2660098522167488, |
|
"acc_norm_stderr": 0.03108982600293752 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"acc_norm": 0.3431372549019608, |
|
"acc_norm_stderr": 0.047240073523838896 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"acc_norm": 0.3599374021909233, |
|
"acc_norm_stderr": 0.01900265897476567 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"acc_norm": 0.3263473053892216, |
|
"acc_norm_stderr": 0.02569424876081477 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"acc_norm": 0.3403361344537815, |
|
"acc_norm_stderr": 0.030778057422931673 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"acc_norm": 0.48848848848848847, |
|
"acc_norm_stderr": 0.015823028204038865 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"acc_norm": 0.4713375796178344, |
|
"acc_norm_stderr": 0.02821519686924197 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"acc_norm": 0.4266666666666667, |
|
"acc_norm_stderr": 0.057495266811327245 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"acc_norm": 0.3740831295843521, |
|
"acc_norm_stderr": 0.023955889904969944 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"acc_norm": 0.2768595041322314, |
|
"acc_norm_stderr": 0.028822559103836288 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"acc_norm": 0.3244047619047619, |
|
"acc_norm_stderr": 0.025577877122318127 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"acc_norm": 0.3076923076923077, |
|
"acc_norm_stderr": 0.074871425137275 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"acc_norm": 0.24705882352941178, |
|
"acc_norm_stderr": 0.027062246797977103 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"acc_norm": 0.37142857142857144, |
|
"acc_norm_stderr": 0.03342272296374862 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"acc_norm": 0.3070539419087137, |
|
"acc_norm_stderr": 0.02977497681688396 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"acc_norm": 0.41843971631205673, |
|
"acc_norm_stderr": 0.018592059037882233 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"acc_norm": 0.34074074074074073, |
|
"acc_norm_stderr": 0.040943762699967946 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"acc_norm": 0.3684210526315789, |
|
"acc_norm_stderr": 0.03925523381052932 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"acc_norm": 0.31, |
|
"acc_norm_stderr": 0.04648231987117316 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"acc_norm": 0.32075471698113206, |
|
"acc_norm_stderr": 0.028727502957880267 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"acc_norm": 0.2777777777777778, |
|
"acc_norm_stderr": 0.03745554791462458 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"acc_norm": 0.2, |
|
"acc_norm_stderr": 0.04020151261036846 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"acc_norm": 0.24, |
|
"acc_norm_stderr": 0.04292346959909283 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"acc_norm": 0.32947976878612717, |
|
"acc_norm_stderr": 0.03583901754736411 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"acc_norm": 0.3431372549019608, |
|
"acc_norm_stderr": 0.04724007352383888 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"acc_norm": 0.34, |
|
"acc_norm_stderr": 0.04760952285695235 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"acc_norm": 0.225531914893617, |
|
"acc_norm_stderr": 0.027321078417387536 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"acc_norm": 0.20175438596491227, |
|
"acc_norm_stderr": 0.037752050135836386 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"acc_norm": 0.31724137931034485, |
|
"acc_norm_stderr": 0.038783523721386215 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"acc_norm": 0.24603174603174602, |
|
"acc_norm_stderr": 0.022182037202948365 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"acc_norm": 0.2777777777777778, |
|
"acc_norm_stderr": 0.04006168083848876 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"acc_norm": 0.3191712562286913, |
|
"acc_norm_stderr": 0.007550129173166681 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"acc_norm": 0.3169820717131474, |
|
"acc_norm_stderr": 0.007343289104727924 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"acc_norm": 0.28, |
|
"acc_norm_stderr": 0.04512608598542127 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"acc_norm": 0.30004906771344453, |
|
"acc_norm_stderr": 0.005076033649882777 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"acc_norm": 0.3383838383838384, |
|
"acc_norm_stderr": 0.03371124142626302 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"acc_norm": 0.36787564766839376, |
|
"acc_norm_stderr": 0.034801756684660366 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"acc_norm": 0.32480110688343133, |
|
"acc_norm_stderr": 0.008711153965380018 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"acc_norm": 0.24074074074074073, |
|
"acc_norm_stderr": 0.026067159222275805 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"acc_norm": 0.23109243697478993, |
|
"acc_norm_stderr": 0.027381406927868973 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"acc_norm": 0.2913907284768212, |
|
"acc_norm_stderr": 0.03710185726119994 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"acc_norm": 0.3926605504587156, |
|
"acc_norm_stderr": 0.020937505161201086 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"acc_norm": 0.31521739130434784, |
|
"acc_norm_stderr": 0.00714265927694933 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"acc_norm": 0.37254901960784315, |
|
"acc_norm_stderr": 0.03393388584958404 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"acc_norm": 0.4008438818565401, |
|
"acc_norm_stderr": 0.031900803894732356 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"acc_norm": 0.17937219730941703, |
|
"acc_norm_stderr": 0.025749819569192804 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"acc_norm": 0.35877862595419846, |
|
"acc_norm_stderr": 0.04206739313864908 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"acc_norm": 0.4132231404958678, |
|
"acc_norm_stderr": 0.04495087843548408 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"acc_norm": 0.26851851851851855, |
|
"acc_norm_stderr": 0.04284467968052191 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"acc_norm": 0.3067484662576687, |
|
"acc_norm_stderr": 0.036230899157241474 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"acc_norm": 0.38392857142857145, |
|
"acc_norm_stderr": 0.04616143075028547 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"acc_norm": 0.3592233009708738, |
|
"acc_norm_stderr": 0.04750458399041692 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"acc_norm": 0.3504273504273504, |
|
"acc_norm_stderr": 0.03125610824421881 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"acc_norm": 0.28, |
|
"acc_norm_stderr": 0.04512608598542127 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"acc_norm": 0.3293388429752066, |
|
"acc_norm_stderr": 0.009555537483738226 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"acc_norm": 0.33236994219653176, |
|
"acc_norm_stderr": 0.025361168749688204 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"acc_norm": 0.26145251396648045, |
|
"acc_norm_stderr": 0.014696599650364548 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"acc_norm": 0.3235294117647059, |
|
"acc_norm_stderr": 0.026787453111906532 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"acc_norm": 0.3247588424437299, |
|
"acc_norm_stderr": 0.026596782287697046 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"acc_norm": 0.345679012345679, |
|
"acc_norm_stderr": 0.026462487777001872 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"acc_norm": 0.3136907399202481, |
|
"acc_norm_stderr": 0.006906819585987513 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"acc_norm": 0.30086390384374606, |
|
"acc_norm_stderr": 0.00513217258995318 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"acc_norm": 0.32009773976786804, |
|
"acc_norm_stderr": 0.011533812754350966 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"acc_norm": 0.3185840707964602, |
|
"acc_norm_stderr": 0.007873359802742633 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"acc_norm": 0.21818181818181817, |
|
"acc_norm_stderr": 0.039559328617958335 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"acc_norm": 0.3510204081632653, |
|
"acc_norm_stderr": 0.030555316755573644 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"acc_norm": 0.4129353233830846, |
|
"acc_norm_stderr": 0.03481520803367348 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"acc_norm": 0.39, |
|
"acc_norm_stderr": 0.04902071300001975 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"acc_norm": 0.23493975903614459, |
|
"acc_norm_stderr": 0.03300533186128922 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"acc_norm": 0.38011695906432746, |
|
"acc_norm_stderr": 0.03722965741385539 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"acc_norm": 0.36764705882352944, |
|
"acc_norm_stderr": 0.019506291693954854 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.29315068493150687, |
|
"acc_norm_stderr": 0.02385932679013149 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"acc_norm": 0.45, |
|
"acc_norm_stderr": 0.06476816543825593 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"acc_norm": 0.3018867924528302, |
|
"acc_norm_stderr": 0.06366244470090364 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"acc_norm": 0.3684210526315789, |
|
"acc_norm_stderr": 0.05570002760135977 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"acc_norm": 0.42028985507246375, |
|
"acc_norm_stderr": 0.05985844733688651 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"acc_norm": 0.3287671232876712, |
|
"acc_norm_stderr": 0.055362354579827126 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.06299407883487118 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"acc_norm": 0.358974358974359, |
|
"acc_norm_stderr": 0.05466685034759242 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"acc_norm": 0.21818181818181817, |
|
"acc_norm_stderr": 0.05620374845754972 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.4309064425048031, |
|
"acc_norm_stderr": 0.021750452638666665 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.3353392412047633, |
|
"acc_norm_stderr": 0.03506007300141292 |
|
}, |
|
"community|arabic_mmlu_ht:_average|0": { |
|
"acc_norm": 0.31014360900035487, |
|
"acc_norm_stderr": 0.031213869471113 |
|
}, |
|
"community|madinah_qa:_average|0": { |
|
"acc_norm": 0.33039887187751815, |
|
"acc_norm_stderr": 0.021682809242043175 |
|
}, |
|
"community|aratrust:_average|0": { |
|
"acc_norm": 0.34748179174175686, |
|
"acc_norm_stderr": 0.05915201466215579 |
|
}, |
|
"all": { |
|
"acc_norm": 0.330348622949831, |
|
"acc_norm_stderr": 0.03343916532981487, |
|
"llm_as_judge": 0.8015669515669505, |
|
"llm_as_judge_stderr": 0.0001569158880031379 |
|
}, |
|
"community|alrage_qa|0": { |
|
"llm_as_judge": 0.8015669515669505, |
|
"llm_as_judge_stderr": 0.0001569158880031379 |
|
} |
|
}, |
|
"versions": { |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:Accounting (University)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (General)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0, |
|
"community|arabic_mmlu:Biology (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (High School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (University)|0": 0, |
|
"community|arabic_mmlu:Driving Test|0": 0, |
|
"community|arabic_mmlu:Economics (High School)|0": 0, |
|
"community|arabic_mmlu:Economics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Economics (University)|0": 0, |
|
"community|arabic_mmlu:General Knowledge|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0, |
|
"community|arabic_mmlu:Geography (High School)|0": 0, |
|
"community|arabic_mmlu:Geography (Middle School)|0": 0, |
|
"community|arabic_mmlu:Geography (Primary School)|0": 0, |
|
"community|arabic_mmlu:History (High School)|0": 0, |
|
"community|arabic_mmlu:History (Middle School)|0": 0, |
|
"community|arabic_mmlu:History (Primary School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0, |
|
"community|arabic_mmlu:Law (Professional)|0": 0, |
|
"community|arabic_mmlu:Management (University)|0": 0, |
|
"community|arabic_mmlu:Math (Primary School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Philosophy (High School)|0": 0, |
|
"community|arabic_mmlu:Physics (High School)|0": 0, |
|
"community|arabic_mmlu:Political Science (University)|0": 0, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": 0, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": 0, |
|
"community|arabic_mmlu_ht:anatomy|0": 0, |
|
"community|arabic_mmlu_ht:astronomy|0": 0, |
|
"community|arabic_mmlu_ht:business_ethics|0": 0, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu_ht:college_biology|0": 0, |
|
"community|arabic_mmlu_ht:college_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:college_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:college_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:college_medicine|0": 0, |
|
"community|arabic_mmlu_ht:college_physics|0": 0, |
|
"community|arabic_mmlu_ht:computer_security|0": 0, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": 0, |
|
"community|arabic_mmlu_ht:econometrics|0": 0, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": 0, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:formal_logic|0": 0, |
|
"community|arabic_mmlu_ht:global_facts|0": 0, |
|
"community|arabic_mmlu_ht:high_school_biology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_geography|0": 0, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_physics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": 0, |
|
"community|arabic_mmlu_ht:human_aging|0": 0, |
|
"community|arabic_mmlu_ht:human_sexuality|0": 0, |
|
"community|arabic_mmlu_ht:international_law|0": 0, |
|
"community|arabic_mmlu_ht:jurisprudence|0": 0, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": 0, |
|
"community|arabic_mmlu_ht:machine_learning|0": 0, |
|
"community|arabic_mmlu_ht:management|0": 0, |
|
"community|arabic_mmlu_ht:marketing|0": 0, |
|
"community|arabic_mmlu_ht:medical_genetics|0": 0, |
|
"community|arabic_mmlu_ht:miscellaneous|0": 0, |
|
"community|arabic_mmlu_ht:moral_disputes|0": 0, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": 0, |
|
"community|arabic_mmlu_ht:nutrition|0": 0, |
|
"community|arabic_mmlu_ht:philosophy|0": 0, |
|
"community|arabic_mmlu_ht:prehistory|0": 0, |
|
"community|arabic_mmlu_ht:professional_accounting|0": 0, |
|
"community|arabic_mmlu_ht:professional_law|0": 0, |
|
"community|arabic_mmlu_ht:professional_medicine|0": 0, |
|
"community|arabic_mmlu_ht:professional_psychology|0": 0, |
|
"community|arabic_mmlu_ht:public_relations|0": 0, |
|
"community|arabic_mmlu_ht:security_studies|0": 0, |
|
"community|arabic_mmlu_ht:sociology|0": 0, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu_ht:virology|0": 0, |
|
"community|arabic_mmlu_ht:world_religions|0": 0, |
|
"community|aratrust:Ethics|0": 0, |
|
"community|aratrust:Illegal|0": 0, |
|
"community|aratrust:MentalHealth|0": 0, |
|
"community|aratrust:Offensive|0": 0, |
|
"community|aratrust:PhysicalHealth|0": 0, |
|
"community|aratrust:Privacy|0": 0, |
|
"community|aratrust:Trustfulness|0": 0, |
|
"community|aratrust:Unfairness|0": 0, |
|
"community|madinah_qa:Arabic Language (General)|0": 0, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": 0, |
|
"community|alrage_qa|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams_pfn", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)": { |
|
"name": "arabic_mmlu:Accounting (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Accounting (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 74, |
|
"effective_num_docs": 74, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)": { |
|
"name": "arabic_mmlu:Arabic Language (General)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)": { |
|
"name": "arabic_mmlu:Arabic Language (Grammar)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)": { |
|
"name": "arabic_mmlu:Arabic Language (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)": { |
|
"name": "arabic_mmlu:Arabic Language (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)": { |
|
"name": "arabic_mmlu:Arabic Language (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 252, |
|
"effective_num_docs": 252, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)": { |
|
"name": "arabic_mmlu:Biology (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Biology (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1409, |
|
"effective_num_docs": 1409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)": { |
|
"name": "arabic_mmlu:Civics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)": { |
|
"name": "arabic_mmlu:Civics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 236, |
|
"effective_num_docs": 236, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)": { |
|
"name": "arabic_mmlu:Computer Science (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 261, |
|
"effective_num_docs": 261, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)": { |
|
"name": "arabic_mmlu:Computer Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)": { |
|
"name": "arabic_mmlu:Computer Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 190, |
|
"effective_num_docs": 190, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)": { |
|
"name": "arabic_mmlu:Computer Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 64, |
|
"effective_num_docs": 64, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test": { |
|
"name": "arabic_mmlu:Driving Test", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Driving Test", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1211, |
|
"effective_num_docs": 1211, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)": { |
|
"name": "arabic_mmlu:Economics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 360, |
|
"effective_num_docs": 360, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)": { |
|
"name": "arabic_mmlu:Economics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)": { |
|
"name": "arabic_mmlu:Economics (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 137, |
|
"effective_num_docs": 137, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge": { |
|
"name": "arabic_mmlu:General Knowledge", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 864, |
|
"effective_num_docs": 864, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)": { |
|
"name": "arabic_mmlu:General Knowledge (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 172, |
|
"effective_num_docs": 172, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)": { |
|
"name": "arabic_mmlu:General Knowledge (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 162, |
|
"effective_num_docs": 162, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)": { |
|
"name": "arabic_mmlu:Geography (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1038, |
|
"effective_num_docs": 1038, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)": { |
|
"name": "arabic_mmlu:Geography (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)": { |
|
"name": "arabic_mmlu:Geography (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)": { |
|
"name": "arabic_mmlu:History (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 760, |
|
"effective_num_docs": 760, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)": { |
|
"name": "arabic_mmlu:History (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)": { |
|
"name": "arabic_mmlu:History (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies": { |
|
"name": "arabic_mmlu:Islamic Studies", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 639, |
|
"effective_num_docs": 639, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)": { |
|
"name": "arabic_mmlu:Islamic Studies (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 334, |
|
"effective_num_docs": 334, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 999, |
|
"effective_num_docs": 999, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)": { |
|
"name": "arabic_mmlu:Law (Professional)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Law (Professional)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 314, |
|
"effective_num_docs": 314, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)": { |
|
"name": "arabic_mmlu:Management (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Management (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)": { |
|
"name": "arabic_mmlu:Math (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Math (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 409, |
|
"effective_num_docs": 409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)": { |
|
"name": "arabic_mmlu:Natural Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 242, |
|
"effective_num_docs": 242, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)": { |
|
"name": "arabic_mmlu:Natural Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 336, |
|
"effective_num_docs": 336, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)": { |
|
"name": "arabic_mmlu:Philosophy (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Philosophy (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 39, |
|
"effective_num_docs": 39, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)": { |
|
"name": "arabic_mmlu:Physics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Physics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)": { |
|
"name": "arabic_mmlu:Political Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Political Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 210, |
|
"effective_num_docs": 210, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)": { |
|
"name": "arabic_mmlu:Social Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 241, |
|
"effective_num_docs": 241, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)": { |
|
"name": "arabic_mmlu:Social Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 705, |
|
"effective_num_docs": 705, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra": { |
|
"name": "arabic_mmlu_ht:abstract_algebra", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy": { |
|
"name": "arabic_mmlu_ht:anatomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy": { |
|
"name": "arabic_mmlu_ht:astronomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics": { |
|
"name": "arabic_mmlu_ht:business_ethics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge": { |
|
"name": "arabic_mmlu_ht:clinical_knowledge", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology": { |
|
"name": "arabic_mmlu_ht:college_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry": { |
|
"name": "arabic_mmlu_ht:college_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science": { |
|
"name": "arabic_mmlu_ht:college_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics": { |
|
"name": "arabic_mmlu_ht:college_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine": { |
|
"name": "arabic_mmlu_ht:college_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics": { |
|
"name": "arabic_mmlu_ht:college_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security": { |
|
"name": "arabic_mmlu_ht:computer_security", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics": { |
|
"name": "arabic_mmlu_ht:conceptual_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics": { |
|
"name": "arabic_mmlu_ht:econometrics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering": { |
|
"name": "arabic_mmlu_ht:electrical_engineering", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics": { |
|
"name": "arabic_mmlu_ht:elementary_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic": { |
|
"name": "arabic_mmlu_ht:formal_logic", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts": { |
|
"name": "arabic_mmlu_ht:global_facts", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology": { |
|
"name": "arabic_mmlu_ht:high_school_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3813, |
|
"effective_num_docs": 3813, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry": { |
|
"name": "arabic_mmlu_ht:high_school_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4016, |
|
"effective_num_docs": 4016, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science": { |
|
"name": "arabic_mmlu_ht:high_school_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history": { |
|
"name": "arabic_mmlu_ht:high_school_european_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 8152, |
|
"effective_num_docs": 8152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography": { |
|
"name": "arabic_mmlu_ht:high_school_geography", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics": { |
|
"name": "arabic_mmlu_ht:high_school_government_and_politics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_macroeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2891, |
|
"effective_num_docs": 2891, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics": { |
|
"name": "arabic_mmlu_ht:high_school_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_microeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics": { |
|
"name": "arabic_mmlu_ht:high_school_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology": { |
|
"name": "arabic_mmlu_ht:high_school_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics": { |
|
"name": "arabic_mmlu_ht:high_school_statistics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4232, |
|
"effective_num_docs": 4232, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history": { |
|
"name": "arabic_mmlu_ht:high_school_us_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history": { |
|
"name": "arabic_mmlu_ht:high_school_world_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging": { |
|
"name": "arabic_mmlu_ht:human_aging", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality": { |
|
"name": "arabic_mmlu_ht:human_sexuality", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law": { |
|
"name": "arabic_mmlu_ht:international_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence": { |
|
"name": "arabic_mmlu_ht:jurisprudence", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies": { |
|
"name": "arabic_mmlu_ht:logical_fallacies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning": { |
|
"name": "arabic_mmlu_ht:machine_learning", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management": { |
|
"name": "arabic_mmlu_ht:management", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing": { |
|
"name": "arabic_mmlu_ht:marketing", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics": { |
|
"name": "arabic_mmlu_ht:medical_genetics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous": { |
|
"name": "arabic_mmlu_ht:miscellaneous", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2420, |
|
"effective_num_docs": 2420, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes": { |
|
"name": "arabic_mmlu_ht:moral_disputes", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios": { |
|
"name": "arabic_mmlu_ht:moral_scenarios", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition": { |
|
"name": "arabic_mmlu_ht:nutrition", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy": { |
|
"name": "arabic_mmlu_ht:philosophy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory": { |
|
"name": "arabic_mmlu_ht:prehistory", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting": { |
|
"name": "arabic_mmlu_ht:professional_accounting", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4514, |
|
"effective_num_docs": 4514, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law": { |
|
"name": "arabic_mmlu_ht:professional_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7987, |
|
"effective_num_docs": 7987, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine": { |
|
"name": "arabic_mmlu_ht:professional_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1637, |
|
"effective_num_docs": 1637, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology": { |
|
"name": "arabic_mmlu_ht:professional_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3503, |
|
"effective_num_docs": 3503, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations": { |
|
"name": "arabic_mmlu_ht:public_relations", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies": { |
|
"name": "arabic_mmlu_ht:security_studies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology": { |
|
"name": "arabic_mmlu_ht:sociology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy": { |
|
"name": "arabic_mmlu_ht:us_foreign_policy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology": { |
|
"name": "arabic_mmlu_ht:virology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions": { |
|
"name": "arabic_mmlu_ht:world_religions", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Ethics": { |
|
"name": "aratrust:Ethics", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 60, |
|
"effective_num_docs": 60, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Illegal": { |
|
"name": "aratrust:Illegal", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Illegal", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 53, |
|
"effective_num_docs": 53, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:MentalHealth": { |
|
"name": "aratrust:MentalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "MentalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 76, |
|
"effective_num_docs": 76, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Offensive": { |
|
"name": "aratrust:Offensive", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Offensive", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 69, |
|
"effective_num_docs": 69, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth": { |
|
"name": "aratrust:PhysicalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "PhysicalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 73, |
|
"effective_num_docs": 73, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Privacy": { |
|
"name": "aratrust:Privacy", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Privacy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Trustfulness": { |
|
"name": "aratrust:Trustfulness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Trustfulness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 78, |
|
"effective_num_docs": 78, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Unfairness": { |
|
"name": "aratrust:Unfairness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Unfairness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 55, |
|
"effective_num_docs": 55, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)": { |
|
"name": "madinah_qa:Arabic Language (General)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)": { |
|
"name": "madinah_qa:Arabic Language (Grammar)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alrage_qa": { |
|
"name": "alrage_qa", |
|
"prompt_function": "qa_prompt_arabic", |
|
"hf_repo": "OALL/ALRAGE", |
|
"hf_subset": null, |
|
"metric": [ |
|
{ |
|
"metric_name": "llm_as_judge", |
|
"higher_is_better": true, |
|
"category": "7", |
|
"use_case": "10", |
|
"sample_level_fn": "_sample_level_fn", |
|
"corpus_level_fn": "aggregate_scores" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 200, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2106, |
|
"effective_num_docs": 2106, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "c07a5e78c5c0b8fe", |
|
"hash_input_tokens": "506e74c5bf7c8bd2", |
|
"hash_cont_tokens": "7f907530d0fd020e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "c0b6081f83e14064", |
|
"hash_input_tokens": "7c973a499c3088ab", |
|
"hash_cont_tokens": "8dc61709faf12e74" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21441, |
|
"non_padded": 139, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "64eb78a7c5b7484b", |
|
"hash_input_tokens": "675a807b99a95f4e", |
|
"hash_cont_tokens": "d3545e11ef188cd0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3554, |
|
"non_padded": 26, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "54fc3502c1c02c06", |
|
"hash_input_tokens": "786a6cea275edb82", |
|
"hash_cont_tokens": "b3d037d09ace8d7c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "46572d83696552ae", |
|
"hash_input_tokens": "e8a07b4736c235de", |
|
"hash_cont_tokens": "0faf53527a163be7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 748, |
|
"non_padded": 2, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "f430d97ff715bc1c", |
|
"hash_input_tokens": "6bc7c7d7412d6095", |
|
"hash_cont_tokens": "8280078ee8a38203" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 748, |
|
"non_padded": 2, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "6b70a7416584f98c", |
|
"hash_input_tokens": "ac17fe66698ac82a", |
|
"hash_cont_tokens": "a288013b4fde6e42" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "bc2005cc9d2f436e", |
|
"hash_input_tokens": "ba240f75d4df4bf3", |
|
"hash_cont_tokens": "f1d66eb38974410c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17985, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "6fb0e254ea5945d8", |
|
"hash_input_tokens": "48a3ab67c755a8e7", |
|
"hash_cont_tokens": "b3419f2698061061" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5148, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "6d721df351722656", |
|
"hash_input_tokens": "bf87d32f0fa1b4be", |
|
"hash_cont_tokens": "171fb57fd6259975" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2148, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"hashes": { |
|
"hash_examples": "30e09697562ff9e7", |
|
"hash_full_prompts": "30e09697562ff9e7", |
|
"hash_input_tokens": "e85d3ed7301f3343", |
|
"hash_cont_tokens": "e0dab4232af29d72" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 74, |
|
"padded": 256, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "bef69fb8b3b75f28", |
|
"hash_full_prompts": "bef69fb8b3b75f28", |
|
"hash_input_tokens": "cb23857395448690", |
|
"hash_cont_tokens": "71c556f84cffa4ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2403, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "bd066a9e6a140a4b", |
|
"hash_full_prompts": "bd066a9e6a140a4b", |
|
"hash_input_tokens": "eec004310aa28a96", |
|
"hash_cont_tokens": "c0033d30d961ae4b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1571, |
|
"non_padded": 17, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a9c2cd9a9929292a", |
|
"hash_full_prompts": "a9c2cd9a9929292a", |
|
"hash_input_tokens": "508df19350ce9bb3", |
|
"hash_cont_tokens": "7b3bbee9ad184f65" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1517, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "2f8a77bbbd0e21ff", |
|
"hash_full_prompts": "2f8a77bbbd0e21ff", |
|
"hash_input_tokens": "cf4e6b41f9295e30", |
|
"hash_cont_tokens": "083f765525254806" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 105, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5eed3da47822539b", |
|
"hash_full_prompts": "5eed3da47822539b", |
|
"hash_input_tokens": "8900fc601a417650", |
|
"hash_cont_tokens": "b6d18e49647d34de" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 252, |
|
"padded": 926, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "91ae6d22a0f0213d", |
|
"hash_full_prompts": "91ae6d22a0f0213d", |
|
"hash_input_tokens": "d567245aad360ea6", |
|
"hash_cont_tokens": "0f2c6a467f8d664d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1409, |
|
"padded": 5052, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "f27bf8791bea2bb9", |
|
"hash_full_prompts": "f27bf8791bea2bb9", |
|
"hash_input_tokens": "823e93560e07bf49", |
|
"hash_cont_tokens": "c216c67b09b242f2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 316, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "74f5bb0098c8916f", |
|
"hash_full_prompts": "74f5bb0098c8916f", |
|
"hash_input_tokens": "04e771b46ec63cfc", |
|
"hash_cont_tokens": "381fb3fe1811bacf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 236, |
|
"padded": 944, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a4278d7b525d46fe", |
|
"hash_full_prompts": "a4278d7b525d46fe", |
|
"hash_input_tokens": "f98284ed7f8923a1", |
|
"hash_cont_tokens": "a32dd534e4c7918a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 261, |
|
"padded": 1006, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "0cb6c07e4b80dfd4", |
|
"hash_full_prompts": "0cb6c07e4b80dfd4", |
|
"hash_input_tokens": "e08401e20e77bb23", |
|
"hash_cont_tokens": "abd7b31d006b486c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 100, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "d96fc1bc32473533", |
|
"hash_full_prompts": "d96fc1bc32473533", |
|
"hash_input_tokens": "63ab6be6618ea5d5", |
|
"hash_cont_tokens": "85d961645561c2de" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 190, |
|
"padded": 476, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "8835587e436cbaff", |
|
"hash_full_prompts": "8835587e436cbaff", |
|
"hash_input_tokens": "7e8393d4f861b8a7", |
|
"hash_cont_tokens": "ed2ee13cabcc847c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 64, |
|
"padded": 255, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"hashes": { |
|
"hash_examples": "7a4c38a2c451d075", |
|
"hash_full_prompts": "7a4c38a2c451d075", |
|
"hash_input_tokens": "406e498382c606ab", |
|
"hash_cont_tokens": "9a89f7f470503149" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1211, |
|
"padded": 3677, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c04c252836601279", |
|
"hash_full_prompts": "c04c252836601279", |
|
"hash_input_tokens": "25fcbb4f0b5bff8d", |
|
"hash_cont_tokens": "aa47e7b59dcc18fc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 360, |
|
"padded": 1390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "18fba1579406b3cc", |
|
"hash_full_prompts": "18fba1579406b3cc", |
|
"hash_input_tokens": "fb4a5bc4d8cedf50", |
|
"hash_cont_tokens": "c58f6191959caa6a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 348, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"hashes": { |
|
"hash_examples": "7c9e86fba8151562", |
|
"hash_full_prompts": "7c9e86fba8151562", |
|
"hash_input_tokens": "74296aae7eb5189c", |
|
"hash_cont_tokens": "3bc234da154e19a9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 137, |
|
"padded": 544, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "acfbe4e1f0314b85", |
|
"hash_full_prompts": "acfbe4e1f0314b85", |
|
"hash_input_tokens": "eb3644a350326210", |
|
"hash_cont_tokens": "9a83fb657c0583cd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 864, |
|
"padded": 3213, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "03cd0ecf10224316", |
|
"hash_full_prompts": "03cd0ecf10224316", |
|
"hash_input_tokens": "f004724889571a12", |
|
"hash_cont_tokens": "e6edd38b7a4ded0c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 172, |
|
"padded": 628, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "c3ee30196e05e122", |
|
"hash_full_prompts": "c3ee30196e05e122", |
|
"hash_input_tokens": "16741672ca50633e", |
|
"hash_cont_tokens": "0ae768c0f75e3e38" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 162, |
|
"padded": 637, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "e2e329d2bdd9fb7b", |
|
"hash_full_prompts": "e2e329d2bdd9fb7b", |
|
"hash_input_tokens": "ae5481277bffb758", |
|
"hash_cont_tokens": "13b706bb0d6ff1d3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1038, |
|
"padded": 4116, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "420b161444291989", |
|
"hash_full_prompts": "420b161444291989", |
|
"hash_input_tokens": "8be535bee92e3b59", |
|
"hash_cont_tokens": "78937fd0da21f9c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 975, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5bc5ca48a4210899", |
|
"hash_full_prompts": "5bc5ca48a4210899", |
|
"hash_input_tokens": "a0ac9830794ee9e3", |
|
"hash_cont_tokens": "e219726e2cb9bf7b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 216, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c7cc37f29311bea1", |
|
"hash_full_prompts": "c7cc37f29311bea1", |
|
"hash_input_tokens": "c8b10f6e332920b0", |
|
"hash_cont_tokens": "8949296fd092f74f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 760, |
|
"padded": 2962, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "5b9f1973337153a2", |
|
"hash_full_prompts": "5b9f1973337153a2", |
|
"hash_input_tokens": "7a83f59b051342bc", |
|
"hash_cont_tokens": "38dc9593aff13b62" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 746, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "af2469847007c1fe", |
|
"hash_full_prompts": "af2469847007c1fe", |
|
"hash_input_tokens": "2edd7e9896219beb", |
|
"hash_cont_tokens": "032555d9f0509b04" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"hashes": { |
|
"hash_examples": "c8da9b2f16a5ea0f", |
|
"hash_full_prompts": "c8da9b2f16a5ea0f", |
|
"hash_input_tokens": "1d6a244ac3669a87", |
|
"hash_cont_tokens": "c9056993c7d0892a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 639, |
|
"padded": 2529, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "efb11bc8ef398117", |
|
"hash_full_prompts": "efb11bc8ef398117", |
|
"hash_input_tokens": "b673311d251d4028", |
|
"hash_cont_tokens": "827575cccc1912fe" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 334, |
|
"padded": 1285, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "9e33ab030eebdb99", |
|
"hash_full_prompts": "9e33ab030eebdb99", |
|
"hash_input_tokens": "25f44dbc9c914662", |
|
"hash_cont_tokens": "4d3a1a4cab8f6227" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 883, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "4167565d878b20eb", |
|
"hash_full_prompts": "4167565d878b20eb", |
|
"hash_input_tokens": "bc5029f97b50ab6b", |
|
"hash_cont_tokens": "73363ea8b701a2aa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 999, |
|
"padded": 3024, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"hashes": { |
|
"hash_examples": "e77f52c8fe4352b3", |
|
"hash_full_prompts": "e77f52c8fe4352b3", |
|
"hash_input_tokens": "dd6f617528ba362e", |
|
"hash_cont_tokens": "8b1afca7f37149c7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 314, |
|
"padded": 1232, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"hashes": { |
|
"hash_examples": "09682649b04b7327", |
|
"hash_full_prompts": "09682649b04b7327", |
|
"hash_input_tokens": "3399f37b4db71df9", |
|
"hash_cont_tokens": "3300fc468d89269b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 200, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "edb027bfae7e76f1", |
|
"hash_full_prompts": "edb027bfae7e76f1", |
|
"hash_input_tokens": "026a266cd548ba90", |
|
"hash_cont_tokens": "f93ec78d435b9bcb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 409, |
|
"padded": 1296, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "96e72c9094c2364c", |
|
"hash_full_prompts": "96e72c9094c2364c", |
|
"hash_input_tokens": "977dd0b02cf26b73", |
|
"hash_cont_tokens": "ffd544d84568fd09" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 242, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "69e35bad3dec5a4d", |
|
"hash_full_prompts": "69e35bad3dec5a4d", |
|
"hash_input_tokens": "b562c6992906b6af", |
|
"hash_cont_tokens": "cc029ed4a0a9005b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 336, |
|
"padded": 1228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "dc6ebd484a02fca5", |
|
"hash_full_prompts": "dc6ebd484a02fca5", |
|
"hash_input_tokens": "7d9a0072c56ece76", |
|
"hash_cont_tokens": "5793913bda8bdf54" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 39, |
|
"padded": 156, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "58a1722472c9e644", |
|
"hash_full_prompts": "58a1722472c9e644", |
|
"hash_input_tokens": "8597898be0c53737", |
|
"hash_cont_tokens": "f5c316efe322f4e8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 1020, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "07a4ed6aabbdfd1e", |
|
"hash_full_prompts": "07a4ed6aabbdfd1e", |
|
"hash_input_tokens": "2b345e789d287992", |
|
"hash_cont_tokens": "07be73577447c85e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 210, |
|
"padded": 710, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "8ca955902f304664", |
|
"hash_full_prompts": "8ca955902f304664", |
|
"hash_input_tokens": "4eb680d896ea7c5e", |
|
"hash_cont_tokens": "8865b92baf8fab39" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 241, |
|
"padded": 929, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "934025ab3738123c", |
|
"hash_full_prompts": "934025ab3738123c", |
|
"hash_input_tokens": "de7669975e2a3325", |
|
"hash_cont_tokens": "9be352957d11f7a9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 705, |
|
"padded": 2043, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "0b557911f2f6d919", |
|
"hash_full_prompts": "0b557911f2f6d919", |
|
"hash_input_tokens": "20b97854bd1e4779", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "a552d8a0ef294061", |
|
"hash_full_prompts": "a552d8a0ef294061", |
|
"hash_input_tokens": "422032db30fedb0b", |
|
"hash_cont_tokens": "aadcc4349e516652" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 540, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "c4a372d0af7da098", |
|
"hash_full_prompts": "c4a372d0af7da098", |
|
"hash_input_tokens": "99bb82a2cca8254c", |
|
"hash_cont_tokens": "07230ff31f0872ec" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 608, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "9f71d816abf8af7a", |
|
"hash_full_prompts": "9f71d816abf8af7a", |
|
"hash_input_tokens": "3bdd2d29eea58371", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "38303cd765589ef3", |
|
"hash_full_prompts": "38303cd765589ef3", |
|
"hash_input_tokens": "1750fb8178838278", |
|
"hash_cont_tokens": "7416aa8da61efa97" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1056, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "dbd9b5d318e60b04", |
|
"hash_full_prompts": "dbd9b5d318e60b04", |
|
"hash_input_tokens": "ffb36c837f3bfb1b", |
|
"hash_cont_tokens": "8114a7b6a5595b14" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 576, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "6f88491d03db8a4c", |
|
"hash_full_prompts": "6f88491d03db8a4c", |
|
"hash_input_tokens": "1e302269b1577583", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "ebfdee5ef2ed5e17", |
|
"hash_full_prompts": "ebfdee5ef2ed5e17", |
|
"hash_input_tokens": "b692d2adf0323d80", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "e3f22cd7712aae2f", |
|
"hash_full_prompts": "e3f22cd7712aae2f", |
|
"hash_input_tokens": "9473f26501c02a75", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "51a5501373afb5a7", |
|
"hash_full_prompts": "51a5501373afb5a7", |
|
"hash_input_tokens": "193972ec596198da", |
|
"hash_cont_tokens": "c45dc7ce557a1ea8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 684, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "2d3e015989b108db", |
|
"hash_full_prompts": "2d3e015989b108db", |
|
"hash_input_tokens": "705225d4546bc134", |
|
"hash_cont_tokens": "06968bcc4e4ac389" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 408, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "f8810eddc38dfee4", |
|
"hash_full_prompts": "f8810eddc38dfee4", |
|
"hash_input_tokens": "8530af65d9c4befe", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "211e32cc43c6b1dc", |
|
"hash_full_prompts": "211e32cc43c6b1dc", |
|
"hash_input_tokens": "cc522fbc4b5009d2", |
|
"hash_cont_tokens": "bd4ed66f5beae2b2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "810023786b2484d2", |
|
"hash_full_prompts": "810023786b2484d2", |
|
"hash_input_tokens": "873bb3d794b75405", |
|
"hash_cont_tokens": "8d8e5768b10aa092" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 452, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "a222760c93eaa1ee", |
|
"hash_full_prompts": "a222760c93eaa1ee", |
|
"hash_input_tokens": "c6179e225cc3d953", |
|
"hash_cont_tokens": "aa9608a475d96c71" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "4c069aeee64dc227", |
|
"hash_full_prompts": "4c069aeee64dc227", |
|
"hash_input_tokens": "767c8e37ac9921ed", |
|
"hash_cont_tokens": "e84c047bbc984e4f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1508, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "3cb0ccbf8e8a77ae", |
|
"hash_full_prompts": "3cb0ccbf8e8a77ae", |
|
"hash_input_tokens": "19c42172e361a262", |
|
"hash_cont_tokens": "0b2fd095bfe4f2bc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 496, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "c1d039e64ea321b9", |
|
"hash_full_prompts": "c1d039e64ea321b9", |
|
"hash_input_tokens": "5b788356957cd784", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "ddcb8237bb4ba08a", |
|
"hash_full_prompts": "ddcb8237bb4ba08a", |
|
"hash_input_tokens": "3b3bdccc1dbe6270", |
|
"hash_cont_tokens": "00ef6119c6b5ba2a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3813, |
|
"padded": 15212, |
|
"non_padded": 40, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "07061b55c5c436d9", |
|
"hash_full_prompts": "07061b55c5c436d9", |
|
"hash_input_tokens": "7f6c6ab0d4d30d03", |
|
"hash_cont_tokens": "5fad0f2cf6af54f3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4016, |
|
"padded": 16020, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "8d3405483d5fdcff", |
|
"hash_full_prompts": "8d3405483d5fdcff", |
|
"hash_input_tokens": "f9a84fc4251d0cce", |
|
"hash_cont_tokens": "af3e50f7467546e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "031c49a430356414", |
|
"hash_full_prompts": "031c49a430356414", |
|
"hash_input_tokens": "408312e0405077a3", |
|
"hash_cont_tokens": "fc28e30ed7a6522b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 8152, |
|
"padded": 32548, |
|
"non_padded": 60, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "d0ce2b019a66c1de", |
|
"hash_full_prompts": "d0ce2b019a66c1de", |
|
"hash_input_tokens": "621663ba21e669f9", |
|
"hash_cont_tokens": "40940544e453ffe1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 792, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "7d7c6d476d0576b1", |
|
"hash_full_prompts": "7d7c6d476d0576b1", |
|
"hash_input_tokens": "588bc9e8dff88b8a", |
|
"hash_cont_tokens": "495e44eef731dbcb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 772, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "694d3a01c6144ddb", |
|
"hash_full_prompts": "694d3a01c6144ddb", |
|
"hash_input_tokens": "03a2f91537cac3cb", |
|
"hash_cont_tokens": "879b46d7d01be1cb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2891, |
|
"padded": 11446, |
|
"non_padded": 118, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "004f9c0a40b5ec10", |
|
"hash_full_prompts": "004f9c0a40b5ec10", |
|
"hash_input_tokens": "65ee53804edd24e6", |
|
"hash_cont_tokens": "2ba3118375333a2b" |
|
      },
      "truncated": 0,
      "non_truncated": 270,
      "padded": 1064,
      "non_padded": 16,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_microeconomics|0": {
      "hashes": {
        "hash_examples": "80cf03d462e6ccbc",
        "hash_full_prompts": "80cf03d462e6ccbc",
        "hash_input_tokens": "31d2c2bd4c66936e",
        "hash_cont_tokens": "17b4f1cea85f2bd5"
      },
      "truncated": 0,
      "non_truncated": 238,
      "padded": 948,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_physics|0": {
      "hashes": {
        "hash_examples": "92218def5b383845",
        "hash_full_prompts": "92218def5b383845",
        "hash_input_tokens": "ad08b4a8431e8850",
        "hash_cont_tokens": "43e4e89197fee815"
      },
      "truncated": 0,
      "non_truncated": 151,
      "padded": 604,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_psychology|0": {
      "hashes": {
        "hash_examples": "323f7848fee32e58",
        "hash_full_prompts": "323f7848fee32e58",
        "hash_input_tokens": "56f1573dd70ce8b9",
        "hash_cont_tokens": "152b078c06405b91"
      },
      "truncated": 0,
      "non_truncated": 545,
      "padded": 2144,
      "non_padded": 36,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_statistics|0": {
      "hashes": {
        "hash_examples": "d7bbe0d037cf31ec",
        "hash_full_prompts": "d7bbe0d037cf31ec",
        "hash_input_tokens": "99134b1347c66b55",
        "hash_cont_tokens": "29186713f90f627e"
      },
      "truncated": 0,
      "non_truncated": 4232,
      "padded": 16528,
      "non_padded": 400,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_us_history|0": {
      "hashes": {
        "hash_examples": "722ec9207e3b0e04",
        "hash_full_prompts": "722ec9207e3b0e04",
        "hash_input_tokens": "fc537aa16f0d24cb",
        "hash_cont_tokens": "263ab93f79738281"
      },
      "truncated": 0,
      "non_truncated": 204,
      "padded": 816,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:high_school_world_history|0": {
      "hashes": {
        "hash_examples": "b5eb675d3b578584",
        "hash_full_prompts": "b5eb675d3b578584",
        "hash_input_tokens": "7709e67df59e5558",
        "hash_cont_tokens": "b36a1d094b383c22"
      },
      "truncated": 0,
      "non_truncated": 237,
      "padded": 948,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:human_aging|0": {
      "hashes": {
        "hash_examples": "713ac79cd2dd2d7b",
        "hash_full_prompts": "713ac79cd2dd2d7b",
        "hash_input_tokens": "186eba69238d9e99",
        "hash_cont_tokens": "604647f8937f2db3"
      },
      "truncated": 0,
      "non_truncated": 223,
      "padded": 880,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:human_sexuality|0": {
      "hashes": {
        "hash_examples": "47551ab4e5dcf6c5",
        "hash_full_prompts": "47551ab4e5dcf6c5",
        "hash_input_tokens": "0df1af80db74de3a",
        "hash_cont_tokens": "a052991593548191"
      },
      "truncated": 0,
      "non_truncated": 131,
      "padded": 512,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:international_law|0": {
      "hashes": {
        "hash_examples": "da360336943398d5",
        "hash_full_prompts": "da360336943398d5",
        "hash_input_tokens": "f09c2e2943fb0e76",
        "hash_cont_tokens": "32f8b673e9f2e20e"
      },
      "truncated": 0,
      "non_truncated": 121,
      "padded": 480,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:jurisprudence|0": {
      "hashes": {
        "hash_examples": "661d161a486fb035",
        "hash_full_prompts": "661d161a486fb035",
        "hash_input_tokens": "790f1d6048f359e7",
        "hash_cont_tokens": "774487d4b14fb513"
      },
      "truncated": 0,
      "non_truncated": 108,
      "padded": 428,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:logical_fallacies|0": {
      "hashes": {
        "hash_examples": "5c3926384758bda7",
        "hash_full_prompts": "5c3926384758bda7",
        "hash_input_tokens": "6bad9e71713e80ef",
        "hash_cont_tokens": "e75f9e7b5bf73c10"
      },
      "truncated": 0,
      "non_truncated": 163,
      "padded": 640,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:machine_learning|0": {
      "hashes": {
        "hash_examples": "3ce756e6a22ffc48",
        "hash_full_prompts": "3ce756e6a22ffc48",
        "hash_input_tokens": "c90a2905e2558602",
        "hash_cont_tokens": "e6022ac362a179e8"
      },
      "truncated": 0,
      "non_truncated": 112,
      "padded": 444,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:management|0": {
      "hashes": {
        "hash_examples": "20fe769bb3276832",
        "hash_full_prompts": "20fe769bb3276832",
        "hash_input_tokens": "61fcf279e71b2a60",
        "hash_cont_tokens": "c1826c98b1a4b2d8"
      },
      "truncated": 0,
      "non_truncated": 103,
      "padded": 400,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:marketing|0": {
      "hashes": {
        "hash_examples": "6b19449559d987ce",
        "hash_full_prompts": "6b19449559d987ce",
        "hash_input_tokens": "da3f6dc875487b34",
        "hash_cont_tokens": "1073d747ed4eb9eb"
      },
      "truncated": 0,
      "non_truncated": 234,
      "padded": 908,
      "non_padded": 28,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:medical_genetics|0": {
      "hashes": {
        "hash_examples": "cbb0fa9df0f5435a",
        "hash_full_prompts": "cbb0fa9df0f5435a",
        "hash_input_tokens": "31c62d188bbf867f",
        "hash_cont_tokens": "af3e50f7467546e3"
      },
      "truncated": 0,
      "non_truncated": 100,
      "padded": 396,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:miscellaneous|0": {
      "hashes": {
        "hash_examples": "0a4134046c23cff9",
        "hash_full_prompts": "0a4134046c23cff9",
        "hash_input_tokens": "c1df5e0c987b94e1",
        "hash_cont_tokens": "253616f7cbe66a88"
      },
      "truncated": 0,
      "non_truncated": 2420,
      "padded": 9376,
      "non_padded": 304,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:moral_disputes|0": {
      "hashes": {
        "hash_examples": "1ac8a0967c82caa0",
        "hash_full_prompts": "1ac8a0967c82caa0",
        "hash_input_tokens": "e7c8afd469ff08ec",
        "hash_cont_tokens": "ab7fb56161a8db07"
      },
      "truncated": 0,
      "non_truncated": 346,
      "padded": 1360,
      "non_padded": 24,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:moral_scenarios|0": {
      "hashes": {
        "hash_examples": "2c0670188bc5a789",
        "hash_full_prompts": "2c0670188bc5a789",
        "hash_input_tokens": "e981965af19e9eed",
        "hash_cont_tokens": "22f9ffedec6559bf"
      },
      "truncated": 0,
      "non_truncated": 895,
      "padded": 3576,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:nutrition|0": {
      "hashes": {
        "hash_examples": "658628c0dcdfe201",
        "hash_full_prompts": "658628c0dcdfe201",
        "hash_input_tokens": "59d828afa6b0163a",
        "hash_cont_tokens": "b63821693d1401cf"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1192,
      "non_padded": 32,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:philosophy|0": {
      "hashes": {
        "hash_examples": "8b6707b322affafd",
        "hash_full_prompts": "8b6707b322affafd",
        "hash_input_tokens": "05eba706389361bc",
        "hash_cont_tokens": "e8152b624592d54e"
      },
      "truncated": 0,
      "non_truncated": 311,
      "padded": 1208,
      "non_padded": 36,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:prehistory|0": {
      "hashes": {
        "hash_examples": "0c85ffcdc9a7b367",
        "hash_full_prompts": "0c85ffcdc9a7b367",
        "hash_input_tokens": "8584f35f748e266c",
        "hash_cont_tokens": "ebdfb89e8e6223cd"
      },
      "truncated": 0,
      "non_truncated": 324,
      "padded": 1264,
      "non_padded": 32,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:professional_accounting|0": {
      "hashes": {
        "hash_examples": "cce1ea2d5f544b2f",
        "hash_full_prompts": "cce1ea2d5f544b2f",
        "hash_input_tokens": "ca1a367db929ef44",
        "hash_cont_tokens": "0f9530ef72a50444"
      },
      "truncated": 0,
      "non_truncated": 4514,
      "padded": 17652,
      "non_padded": 404,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:professional_law|0": {
      "hashes": {
        "hash_examples": "1c654b024b54eb4b",
        "hash_full_prompts": "1c654b024b54eb4b",
        "hash_input_tokens": "b250b8d230164256",
        "hash_cont_tokens": "a4609eed2018f00a"
      },
      "truncated": 0,
      "non_truncated": 7987,
      "padded": 31435,
      "non_padded": 513,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:professional_medicine|0": {
      "hashes": {
        "hash_examples": "c621eaacfa662ebc",
        "hash_full_prompts": "c621eaacfa662ebc",
        "hash_input_tokens": "5479ccfc70d0c313",
        "hash_cont_tokens": "417b08d291a5b1fb"
      },
      "truncated": 0,
      "non_truncated": 1637,
      "padded": 6284,
      "non_padded": 264,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:professional_psychology|0": {
      "hashes": {
        "hash_examples": "bc14a28eaec87dc4",
        "hash_full_prompts": "bc14a28eaec87dc4",
        "hash_input_tokens": "85b3beb2971b7387",
        "hash_cont_tokens": "57b90fe8499adc0f"
      },
      "truncated": 0,
      "non_truncated": 3503,
      "padded": 13452,
      "non_padded": 560,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:public_relations|0": {
      "hashes": {
        "hash_examples": "de4989d9375885c4",
        "hash_full_prompts": "de4989d9375885c4",
        "hash_input_tokens": "f35505d63ec6f996",
        "hash_cont_tokens": "f44ed4a22ec701bf"
      },
      "truncated": 0,
      "non_truncated": 110,
      "padded": 428,
      "non_padded": 12,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:security_studies|0": {
      "hashes": {
        "hash_examples": "3f84bfeec717c6de",
        "hash_full_prompts": "3f84bfeec717c6de",
        "hash_input_tokens": "8e7b0a3dabf3f484",
        "hash_cont_tokens": "2713a00f0fb6afb0"
      },
      "truncated": 0,
      "non_truncated": 245,
      "padded": 976,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:sociology|0": {
      "hashes": {
        "hash_examples": "10d7c2fae10bfcbc",
        "hash_full_prompts": "10d7c2fae10bfcbc",
        "hash_input_tokens": "25f70c94f46960e3",
        "hash_cont_tokens": "8ad80cf0171e225e"
      },
      "truncated": 0,
      "non_truncated": 201,
      "padded": 788,
      "non_padded": 16,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:us_foreign_policy|0": {
      "hashes": {
        "hash_examples": "bb05f02c38ddaf1a",
        "hash_full_prompts": "bb05f02c38ddaf1a",
        "hash_input_tokens": "7d664c78b8db957e",
        "hash_cont_tokens": "af3e50f7467546e3"
      },
      "truncated": 0,
      "non_truncated": 100,
      "padded": 380,
      "non_padded": 20,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:virology|0": {
      "hashes": {
        "hash_examples": "290915a48884ede2",
        "hash_full_prompts": "290915a48884ede2",
        "hash_input_tokens": "f333e642eccd6759",
        "hash_cont_tokens": "1456f6d23eaed06e"
      },
      "truncated": 0,
      "non_truncated": 166,
      "padded": 628,
      "non_padded": 36,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|arabic_mmlu_ht:world_religions|0": {
      "hashes": {
        "hash_examples": "91cc5451c7284f75",
        "hash_full_prompts": "91cc5451c7284f75",
        "hash_input_tokens": "bb0c5643b82c0fb4",
        "hash_cont_tokens": "c1b74e11a61eb745"
      },
      "truncated": 0,
      "non_truncated": 171,
      "padded": 628,
      "non_padded": 56,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|madinah_qa:Arabic Language (General)|0": {
      "hashes": {
        "hash_examples": "25bf94d05f737b63",
        "hash_full_prompts": "25bf94d05f737b63",
        "hash_input_tokens": "defefb69ca7dc3fd",
        "hash_cont_tokens": "54c497cb55b112b6"
      },
      "truncated": 264,
      "non_truncated": 348,
      "padded": 2130,
      "non_padded": 273,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|madinah_qa:Arabic Language (Grammar)|0": {
      "hashes": {
        "hash_examples": "e65fe4df843f4380",
        "hash_full_prompts": "e65fe4df843f4380",
        "hash_input_tokens": "d07f1d8a5ecd1399",
        "hash_cont_tokens": "bff89743b63ad11c"
      },
      "truncated": 0,
      "non_truncated": 365,
      "padded": 1569,
      "non_padded": 19,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Ethics|0": {
      "hashes": {
        "hash_examples": "5d32da36271c5eb4",
        "hash_full_prompts": "5d32da36271c5eb4",
        "hash_input_tokens": "e1c314ac8b15ad13",
        "hash_cont_tokens": "a61c88e6874ea0f3"
      },
      "truncated": 0,
      "non_truncated": 60,
      "padded": 180,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Illegal|0": {
      "hashes": {
        "hash_examples": "0c07f1f100f2d0e8",
        "hash_full_prompts": "0c07f1f100f2d0e8",
        "hash_input_tokens": "afb41343537e66a5",
        "hash_cont_tokens": "ce02a24a3b6deaf2"
      },
      "truncated": 0,
      "non_truncated": 53,
      "padded": 159,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:MentalHealth|0": {
      "hashes": {
        "hash_examples": "8e5fc5c4704bd96b",
        "hash_full_prompts": "8e5fc5c4704bd96b",
        "hash_input_tokens": "24697f4654ad26c0",
        "hash_cont_tokens": "507159565a90ee58"
      },
      "truncated": 0,
      "non_truncated": 76,
      "padded": 225,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Offensive|0": {
      "hashes": {
        "hash_examples": "5ad4369b7dc5de46",
        "hash_full_prompts": "5ad4369b7dc5de46",
        "hash_input_tokens": "dbec033fea80660f",
        "hash_cont_tokens": "0dd1d1830ca63474"
      },
      "truncated": 0,
      "non_truncated": 69,
      "padded": 204,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:PhysicalHealth|0": {
      "hashes": {
        "hash_examples": "dc2a632e2dcc86db",
        "hash_full_prompts": "dc2a632e2dcc86db",
        "hash_input_tokens": "2dbbae2b974fd3cc",
        "hash_cont_tokens": "2d67dec7c6bea675"
      },
      "truncated": 0,
      "non_truncated": 73,
      "padded": 210,
      "non_padded": 9,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Privacy|0": {
      "hashes": {
        "hash_examples": "295e35448a39e003",
        "hash_full_prompts": "295e35448a39e003",
        "hash_input_tokens": "5e1378b4bb084cc6",
        "hash_cont_tokens": "e84f7d85ef83afe0"
      },
      "truncated": 0,
      "non_truncated": 57,
      "padded": 162,
      "non_padded": 9,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Trustfulness|0": {
      "hashes": {
        "hash_examples": "e79ac1ea5439e623",
        "hash_full_prompts": "e79ac1ea5439e623",
        "hash_input_tokens": "5f29e65e5daea906",
        "hash_cont_tokens": "8accfcabf2f5f70d"
      },
      "truncated": 0,
      "non_truncated": 78,
      "padded": 234,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|aratrust:Unfairness|0": {
      "hashes": {
        "hash_examples": "4ac5dccbfbdc5077",
        "hash_full_prompts": "4ac5dccbfbdc5077",
        "hash_input_tokens": "1e54c89217245640",
        "hash_cont_tokens": "03a04d63241fa57b"
      },
      "truncated": 0,
      "non_truncated": 55,
      "padded": 162,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "community|alrage_qa|0": {
      "hashes": {
        "hash_examples": "3edbbe22cabd4160",
        "hash_full_prompts": "3edbbe22cabd4160",
        "hash_input_tokens": "bcfab0d9075ee52a",
        "hash_cont_tokens": "3c829a9e3b3651e9"
      },
      "truncated": 2106,
      "non_truncated": 0,
      "padded": 2104,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "b8b3b49631adcc40",
      "hash_full_prompts": "b8b3b49631adcc40",
      "hash_input_tokens": "0cbdab148e62277f",
      "hash_cont_tokens": "9280fb1359977a73"
    },
    "truncated": 120,
    "non_truncated": 91742,
    "padded": 333340,
    "non_padded": 3544,
    "num_truncated_few_shots": 0
  }
}