|
{ |
|
"config_general": { |
|
"lighteval_sha": "?", |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null, |
|
"job_id": 0, |
|
"start_time": 757.68211351, |
|
"end_time": 5568.814069329, |
|
"total_evaluation_time_secondes": 4811.131955819,
|
"model_name": "SeaLLMs/SeaLLM-7B-v2.5", |
|
"model_sha": "a961daf713dcb31e3253ebe40d43ea5fb7a84099", |
|
"model_dtype": "torch.float16", |
|
"model_size": "15.9 GB" |
|
}, |
|
"results": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.362657091561939, |
|
"acc_norm_stderr": 0.020389063116959684 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.38832252085264135, |
|
"acc_norm_stderr": 0.006635934085053933 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.44692737430167595, |
|
"acc_norm_stderr": 0.01662803003964761 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.52, |
|
"acc_norm_stderr": 0.05807730170189531 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.6666666666666666, |
|
"acc_norm_stderr": 0.038618963090899454 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.52, |
|
"acc_norm_stderr": 0.04092881363092387 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.7962476547842402, |
|
"acc_norm_stderr": 0.004504984688612958 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.5321100917431193, |
|
"acc_norm_stderr": 0.0064448708309838895 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.4011627906976744, |
|
"acc_norm_stderr": 0.011821613941025114 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.4823091247672253, |
|
"acc_norm_stderr": 0.021583188287808124 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"acc_norm": 0.5405405405405406, |
|
"acc_norm_stderr": 0.05832789513012364 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"acc_norm": 0.5343137254901961, |
|
"acc_norm_stderr": 0.020180144843307293 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.5041095890410959, |
|
"acc_norm_stderr": 0.026206235693533635 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"acc_norm": 0.36923076923076925, |
|
"acc_norm_stderr": 0.02446861524147892 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"acc_norm": 0.5555555555555556, |
|
"acc_norm_stderr": 0.09745089103411436 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"acc_norm": 0.5436507936507936, |
|
"acc_norm_stderr": 0.031439222858062964 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"acc_norm": 0.45635202271114267, |
|
"acc_norm_stderr": 0.013274175363837284 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"acc_norm": 0.4367816091954023, |
|
"acc_norm_stderr": 0.05348368965287096 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"acc_norm": 0.4872881355932203, |
|
"acc_norm_stderr": 0.03260586088180842 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"acc_norm": 0.6283524904214559, |
|
"acc_norm_stderr": 0.029969576766255876 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"acc_norm": 0.7777777777777778, |
|
"acc_norm_stderr": 0.08153326507837143 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"acc_norm": 0.6789473684210526, |
|
"acc_norm_stderr": 0.03396059335824887 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"acc_norm": 0.65625, |
|
"acc_norm_stderr": 0.05983919423477113 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"acc_norm": 0.6383154417836499, |
|
"acc_norm_stderr": 0.013813062405862832 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"acc_norm": 0.5694444444444444, |
|
"acc_norm_stderr": 0.0261332278235689 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"acc_norm": 0.7586206896551724, |
|
"acc_norm_stderr": 0.046143776682648914 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"acc_norm": 0.5328467153284672, |
|
"acc_norm_stderr": 0.04278203076713146 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"acc_norm": 0.5370370370370371, |
|
"acc_norm_stderr": 0.016973439065349077 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"acc_norm": 0.622093023255814, |
|
"acc_norm_stderr": 0.037078492187232796 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"acc_norm": 0.6234567901234568, |
|
"acc_norm_stderr": 0.03818542704145086 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"acc_norm": 0.4903660886319846, |
|
"acc_norm_stderr": 0.01552386993797813 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"acc_norm": 0.6213235294117647, |
|
"acc_norm_stderr": 0.029465133639776122 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"acc_norm": 0.6491228070175439, |
|
"acc_norm_stderr": 0.0637744894736816 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"acc_norm": 0.4026315789473684, |
|
"acc_norm_stderr": 0.017801400019764255 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"acc_norm": 0.6059113300492611, |
|
"acc_norm_stderr": 0.03438157967036545 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"acc_norm": 0.5980392156862745, |
|
"acc_norm_stderr": 0.04878608714466996 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"acc_norm": 0.38184663536776214, |
|
"acc_norm_stderr": 0.019234560328031086 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"acc_norm": 0.6167664670658682, |
|
"acc_norm_stderr": 0.026642195538092495 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"acc_norm": 0.5882352941176471, |
|
"acc_norm_stderr": 0.031968769891957786 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"acc_norm": 0.6836836836836837, |
|
"acc_norm_stderr": 0.014720519411075625 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"acc_norm": 0.6592356687898089, |
|
"acc_norm_stderr": 0.026790156135546746 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"acc_norm": 0.6533333333333333, |
|
"acc_norm_stderr": 0.055323237980623136 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"acc_norm": 0.5183374083129584, |
|
"acc_norm_stderr": 0.024737035595238634 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"acc_norm": 0.7479338842975206, |
|
"acc_norm_stderr": 0.027969217594548755 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"acc_norm": 0.8184523809523809, |
|
"acc_norm_stderr": 0.02106054393039409 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"acc_norm": 0.5897435897435898, |
|
"acc_norm_stderr": 0.0797934979708204 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"acc_norm": 0.4392156862745098, |
|
"acc_norm_stderr": 0.031140099171282798 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"acc_norm": 0.5190476190476191, |
|
"acc_norm_stderr": 0.034560617865111484 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"acc_norm": 0.46473029045643155, |
|
"acc_norm_stderr": 0.0321944643008644 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"acc_norm": 0.6950354609929078, |
|
"acc_norm_stderr": 0.017351691730272033 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"acc_norm": 0.36, |
|
"acc_norm_stderr": 0.04824181513244218 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.04292596718256981 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"acc_norm": 0.5460526315789473, |
|
"acc_norm_stderr": 0.04051646342874143 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"acc_norm": 0.54, |
|
"acc_norm_stderr": 0.05009082659620332 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"acc_norm": 0.4867924528301887, |
|
"acc_norm_stderr": 0.030762134874500482 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"acc_norm": 0.5, |
|
"acc_norm_stderr": 0.04181210050035455 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"acc_norm": 0.32, |
|
"acc_norm_stderr": 0.046882617226215034 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"acc_norm": 0.38, |
|
"acc_norm_stderr": 0.048783173121456316 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"acc_norm": 0.49710982658959535, |
|
"acc_norm_stderr": 0.03812400565974833 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"acc_norm": 0.3431372549019608, |
|
"acc_norm_stderr": 0.047240073523838876 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"acc_norm": 0.58, |
|
"acc_norm_stderr": 0.049604496374885836 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"acc_norm": 0.5106382978723404, |
|
"acc_norm_stderr": 0.03267862331014063 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"acc_norm": 0.3333333333333333, |
|
"acc_norm_stderr": 0.04434600701584925 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"acc_norm": 0.5724137931034483, |
|
"acc_norm_stderr": 0.04122737111370332 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"acc_norm": 0.35185185185185186, |
|
"acc_norm_stderr": 0.024594975128920938 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"acc_norm": 0.42063492063492064, |
|
"acc_norm_stderr": 0.04415438226743744 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"acc_norm": 0.34, |
|
"acc_norm_stderr": 0.04760952285695235 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"acc_norm": 0.5022292158405455, |
|
"acc_norm_stderr": 0.008098213867115681 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"acc_norm": 0.4972609561752988, |
|
"acc_norm_stderr": 0.00789079413237891 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"acc_norm": 0.53, |
|
"acc_norm_stderr": 0.05016135580465919 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"acc_norm": 0.4344946025515211, |
|
"acc_norm_stderr": 0.005490414382598177 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"acc_norm": 0.6313131313131313, |
|
"acc_norm_stderr": 0.034373055019806184 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"acc_norm": 0.5440414507772021, |
|
"acc_norm_stderr": 0.03594413711272438 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"acc_norm": 0.5112417848495331, |
|
"acc_norm_stderr": 0.009298465518259176 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"acc_norm": 0.25555555555555554, |
|
"acc_norm_stderr": 0.026593939101844065 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"acc_norm": 0.48739495798319327, |
|
"acc_norm_stderr": 0.032468167657521745 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"acc_norm": 0.2913907284768212, |
|
"acc_norm_stderr": 0.037101857261199946 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"acc_norm": 0.6146788990825688, |
|
"acc_norm_stderr": 0.020865850852794125 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"acc_norm": 0.4881852551984877, |
|
"acc_norm_stderr": 0.007684705222223152 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"acc_norm": 0.5833333333333334, |
|
"acc_norm_stderr": 0.03460228327239171 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"acc_norm": 0.7046413502109705, |
|
"acc_norm_stderr": 0.029696338713422882 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"acc_norm": 0.5381165919282511, |
|
"acc_norm_stderr": 0.033460150119732274 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"acc_norm": 0.5038167938931297, |
|
"acc_norm_stderr": 0.043851623256015534 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"acc_norm": 0.6528925619834711, |
|
"acc_norm_stderr": 0.043457245702925335 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"acc_norm": 0.49074074074074076, |
|
"acc_norm_stderr": 0.04832853553437055 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"acc_norm": 0.5214723926380368, |
|
"acc_norm_stderr": 0.03924746876751129 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"acc_norm": 0.375, |
|
"acc_norm_stderr": 0.04595091388086298 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"acc_norm": 0.5533980582524272, |
|
"acc_norm_stderr": 0.04922424153458933 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"acc_norm": 0.717948717948718, |
|
"acc_norm_stderr": 0.02948036054954119 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"acc_norm": 0.42, |
|
"acc_norm_stderr": 0.04960449637488584 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"acc_norm": 0.5148760330578512, |
|
"acc_norm_stderr": 0.010161545577944492 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"acc_norm": 0.5346820809248555, |
|
"acc_norm_stderr": 0.026854257928258896 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"acc_norm": 0.30614525139664805, |
|
"acc_norm_stderr": 0.015414494487903215 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"acc_norm": 0.545751633986928, |
|
"acc_norm_stderr": 0.02850980780262659 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"acc_norm": 0.4855305466237942, |
|
"acc_norm_stderr": 0.02838619808417768 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"acc_norm": 0.4567901234567901, |
|
"acc_norm_stderr": 0.027716661650194038 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"acc_norm": 0.47873283119184756, |
|
"acc_norm_stderr": 0.007436081234647067 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"acc_norm": 0.43170151496181297, |
|
"acc_norm_stderr": 0.00554262354849565 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"acc_norm": 0.49114233353695785, |
|
"acc_norm_stderr": 0.012359764503121932 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"acc_norm": 0.49528975164145017, |
|
"acc_norm_stderr": 0.008448753934740847 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"acc_norm": 0.5, |
|
"acc_norm_stderr": 0.04789131426105757 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"acc_norm": 0.563265306122449, |
|
"acc_norm_stderr": 0.031751952375833226 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"acc_norm": 0.6417910447761194, |
|
"acc_norm_stderr": 0.03390393042268814 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"acc_norm": 0.63, |
|
"acc_norm_stderr": 0.04852365870939099 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"acc_norm": 0.4759036144578313, |
|
"acc_norm_stderr": 0.03887971849597264 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"acc_norm": 0.5789473684210527, |
|
"acc_norm_stderr": 0.03786720706234215 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"acc_norm": 0.5261437908496732, |
|
"acc_norm_stderr": 0.020200164564804588 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.4520547945205479, |
|
"acc_norm_stderr": 0.026086355811749208 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"acc_norm": 0.7333333333333333, |
|
"acc_norm_stderr": 0.05757170261178307 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"acc_norm": 0.8679245283018868, |
|
"acc_norm_stderr": 0.0469515997666816 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"acc_norm": 0.8947368421052632, |
|
"acc_norm_stderr": 0.035436859432849376 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"acc_norm": 0.7391304347826086, |
|
"acc_norm_stderr": 0.05324977701702559 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"acc_norm": 0.7945205479452054, |
|
"acc_norm_stderr": 0.0476179306269731 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"acc_norm": 0.9122807017543859, |
|
"acc_norm_stderr": 0.037802263481198896 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"acc_norm": 0.717948717948718, |
|
"acc_norm_stderr": 0.05128205128205125 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"acc_norm": 0.8, |
|
"acc_norm_stderr": 0.054433105395181716 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.5148993545119952, |
|
"acc_norm_stderr": 0.022672175014000207 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.5798489117859316, |
|
"acc_norm_stderr": 0.035926699586003114 |
|
}, |
|
"community|arabic_mmlu_ht:_average|0": { |
|
"acc_norm": 0.486949216147901, |
|
"acc_norm_stderr": 0.03325678163195883 |
|
}, |
|
"community|madinah_qa:_average|0": { |
|
"acc_norm": 0.48909929268511054, |
|
"acc_norm_stderr": 0.0231432601882769 |
|
}, |
|
"community|aratrust:_average|0": { |
|
"acc_norm": 0.8074843882714251, |
|
"acc_norm_stderr": 0.04804316120171808 |
|
}, |
|
"all": { |
|
"acc_norm": 0.5427738358870462, |
|
"acc_norm_stderr": 0.034093753075776806, |
|
"llm_as_judge": 0.6792022792022778, |
|
"llm_as_judge_stderr": 0.00015812708387234105 |
|
}, |
|
"community|alrage_qa|0": { |
|
"llm_as_judge": 0.6792022792022778, |
|
"llm_as_judge_stderr": 0.00015812708387234105 |
|
} |
|
}, |
|
"versions": { |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:Accounting (University)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (General)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0, |
|
"community|arabic_mmlu:Biology (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (High School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (University)|0": 0, |
|
"community|arabic_mmlu:Driving Test|0": 0, |
|
"community|arabic_mmlu:Economics (High School)|0": 0, |
|
"community|arabic_mmlu:Economics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Economics (University)|0": 0, |
|
"community|arabic_mmlu:General Knowledge|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0, |
|
"community|arabic_mmlu:Geography (High School)|0": 0, |
|
"community|arabic_mmlu:Geography (Middle School)|0": 0, |
|
"community|arabic_mmlu:Geography (Primary School)|0": 0, |
|
"community|arabic_mmlu:History (High School)|0": 0, |
|
"community|arabic_mmlu:History (Middle School)|0": 0, |
|
"community|arabic_mmlu:History (Primary School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0, |
|
"community|arabic_mmlu:Law (Professional)|0": 0, |
|
"community|arabic_mmlu:Management (University)|0": 0, |
|
"community|arabic_mmlu:Math (Primary School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Philosophy (High School)|0": 0, |
|
"community|arabic_mmlu:Physics (High School)|0": 0, |
|
"community|arabic_mmlu:Political Science (University)|0": 0, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": 0, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": 0, |
|
"community|arabic_mmlu_ht:anatomy|0": 0, |
|
"community|arabic_mmlu_ht:astronomy|0": 0, |
|
"community|arabic_mmlu_ht:business_ethics|0": 0, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu_ht:college_biology|0": 0, |
|
"community|arabic_mmlu_ht:college_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:college_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:college_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:college_medicine|0": 0, |
|
"community|arabic_mmlu_ht:college_physics|0": 0, |
|
"community|arabic_mmlu_ht:computer_security|0": 0, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": 0, |
|
"community|arabic_mmlu_ht:econometrics|0": 0, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": 0, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:formal_logic|0": 0, |
|
"community|arabic_mmlu_ht:global_facts|0": 0, |
|
"community|arabic_mmlu_ht:high_school_biology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_geography|0": 0, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_physics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": 0, |
|
"community|arabic_mmlu_ht:human_aging|0": 0, |
|
"community|arabic_mmlu_ht:human_sexuality|0": 0, |
|
"community|arabic_mmlu_ht:international_law|0": 0, |
|
"community|arabic_mmlu_ht:jurisprudence|0": 0, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": 0, |
|
"community|arabic_mmlu_ht:machine_learning|0": 0, |
|
"community|arabic_mmlu_ht:management|0": 0, |
|
"community|arabic_mmlu_ht:marketing|0": 0, |
|
"community|arabic_mmlu_ht:medical_genetics|0": 0, |
|
"community|arabic_mmlu_ht:miscellaneous|0": 0, |
|
"community|arabic_mmlu_ht:moral_disputes|0": 0, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": 0, |
|
"community|arabic_mmlu_ht:nutrition|0": 0, |
|
"community|arabic_mmlu_ht:philosophy|0": 0, |
|
"community|arabic_mmlu_ht:prehistory|0": 0, |
|
"community|arabic_mmlu_ht:professional_accounting|0": 0, |
|
"community|arabic_mmlu_ht:professional_law|0": 0, |
|
"community|arabic_mmlu_ht:professional_medicine|0": 0, |
|
"community|arabic_mmlu_ht:professional_psychology|0": 0, |
|
"community|arabic_mmlu_ht:public_relations|0": 0, |
|
"community|arabic_mmlu_ht:security_studies|0": 0, |
|
"community|arabic_mmlu_ht:sociology|0": 0, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu_ht:virology|0": 0, |
|
"community|arabic_mmlu_ht:world_religions|0": 0, |
|
"community|aratrust:Ethics|0": 0, |
|
"community|aratrust:Illegal|0": 0, |
|
"community|aratrust:MentalHealth|0": 0, |
|
"community|aratrust:Offensive|0": 0, |
|
"community|aratrust:PhysicalHealth|0": 0, |
|
"community|aratrust:Privacy|0": 0, |
|
"community|aratrust:Trustfulness|0": 0, |
|
"community|aratrust:Unfairness|0": 0, |
|
"community|madinah_qa:Arabic Language (General)|0": 0, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": 0, |
|
"community|alrage_qa|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams_pfn", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)": { |
|
"name": "arabic_mmlu:Accounting (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Accounting (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 74, |
|
"effective_num_docs": 74, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)": { |
|
"name": "arabic_mmlu:Arabic Language (General)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)": { |
|
"name": "arabic_mmlu:Arabic Language (Grammar)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)": { |
|
"name": "arabic_mmlu:Arabic Language (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)": { |
|
"name": "arabic_mmlu:Arabic Language (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)": { |
|
"name": "arabic_mmlu:Arabic Language (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 252, |
|
"effective_num_docs": 252, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)": { |
|
"name": "arabic_mmlu:Biology (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Biology (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1409, |
|
"effective_num_docs": 1409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)": { |
|
"name": "arabic_mmlu:Civics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)": { |
|
"name": "arabic_mmlu:Civics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 236, |
|
"effective_num_docs": 236, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)": { |
|
"name": "arabic_mmlu:Computer Science (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 261, |
|
"effective_num_docs": 261, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)": { |
|
"name": "arabic_mmlu:Computer Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)": { |
|
"name": "arabic_mmlu:Computer Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 190, |
|
"effective_num_docs": 190, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)": { |
|
"name": "arabic_mmlu:Computer Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 64, |
|
"effective_num_docs": 64, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test": { |
|
"name": "arabic_mmlu:Driving Test", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Driving Test", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1211, |
|
"effective_num_docs": 1211, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)": { |
|
"name": "arabic_mmlu:Economics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 360, |
|
"effective_num_docs": 360, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)": { |
|
"name": "arabic_mmlu:Economics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)": { |
|
"name": "arabic_mmlu:Economics (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 137, |
|
"effective_num_docs": 137, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge": { |
|
"name": "arabic_mmlu:General Knowledge", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 864, |
|
"effective_num_docs": 864, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)": { |
|
"name": "arabic_mmlu:General Knowledge (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 172, |
|
"effective_num_docs": 172, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)": { |
|
"name": "arabic_mmlu:General Knowledge (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 162, |
|
"effective_num_docs": 162, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)": { |
|
"name": "arabic_mmlu:Geography (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1038, |
|
"effective_num_docs": 1038, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)": { |
|
"name": "arabic_mmlu:Geography (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)": { |
|
"name": "arabic_mmlu:Geography (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)": { |
|
"name": "arabic_mmlu:History (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 760, |
|
"effective_num_docs": 760, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)": { |
|
"name": "arabic_mmlu:History (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)": { |
|
"name": "arabic_mmlu:History (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies": { |
|
"name": "arabic_mmlu:Islamic Studies", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 639, |
|
"effective_num_docs": 639, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)": { |
|
"name": "arabic_mmlu:Islamic Studies (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 334, |
|
"effective_num_docs": 334, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 999, |
|
"effective_num_docs": 999, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)": { |
|
"name": "arabic_mmlu:Law (Professional)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Law (Professional)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 314, |
|
"effective_num_docs": 314, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)": { |
|
"name": "arabic_mmlu:Management (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Management (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)": { |
|
"name": "arabic_mmlu:Math (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Math (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 409, |
|
"effective_num_docs": 409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)": { |
|
"name": "arabic_mmlu:Natural Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 242, |
|
"effective_num_docs": 242, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)": { |
|
"name": "arabic_mmlu:Natural Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 336, |
|
"effective_num_docs": 336, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)": { |
|
"name": "arabic_mmlu:Philosophy (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Philosophy (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 39, |
|
"effective_num_docs": 39, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)": { |
|
"name": "arabic_mmlu:Physics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Physics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)": { |
|
"name": "arabic_mmlu:Political Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Political Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 210, |
|
"effective_num_docs": 210, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)": { |
|
"name": "arabic_mmlu:Social Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 241, |
|
"effective_num_docs": 241, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)": { |
|
"name": "arabic_mmlu:Social Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 705, |
|
"effective_num_docs": 705, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra": { |
|
"name": "arabic_mmlu_ht:abstract_algebra", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy": { |
|
"name": "arabic_mmlu_ht:anatomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy": { |
|
"name": "arabic_mmlu_ht:astronomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics": { |
|
"name": "arabic_mmlu_ht:business_ethics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge": { |
|
"name": "arabic_mmlu_ht:clinical_knowledge", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology": { |
|
"name": "arabic_mmlu_ht:college_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry": { |
|
"name": "arabic_mmlu_ht:college_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science": { |
|
"name": "arabic_mmlu_ht:college_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics": { |
|
"name": "arabic_mmlu_ht:college_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine": { |
|
"name": "arabic_mmlu_ht:college_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics": { |
|
"name": "arabic_mmlu_ht:college_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security": { |
|
"name": "arabic_mmlu_ht:computer_security", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics": { |
|
"name": "arabic_mmlu_ht:conceptual_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics": { |
|
"name": "arabic_mmlu_ht:econometrics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering": { |
|
"name": "arabic_mmlu_ht:electrical_engineering", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics": { |
|
"name": "arabic_mmlu_ht:elementary_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic": { |
|
"name": "arabic_mmlu_ht:formal_logic", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts": { |
|
"name": "arabic_mmlu_ht:global_facts", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology": { |
|
"name": "arabic_mmlu_ht:high_school_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3813, |
|
"effective_num_docs": 3813, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry": { |
|
"name": "arabic_mmlu_ht:high_school_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4016, |
|
"effective_num_docs": 4016, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science": { |
|
"name": "arabic_mmlu_ht:high_school_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history": { |
|
"name": "arabic_mmlu_ht:high_school_european_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 8152, |
|
"effective_num_docs": 8152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography": { |
|
"name": "arabic_mmlu_ht:high_school_geography", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics": { |
|
"name": "arabic_mmlu_ht:high_school_government_and_politics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_macroeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2891, |
|
"effective_num_docs": 2891, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics": { |
|
"name": "arabic_mmlu_ht:high_school_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_microeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics": { |
|
"name": "arabic_mmlu_ht:high_school_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology": { |
|
"name": "arabic_mmlu_ht:high_school_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics": { |
|
"name": "arabic_mmlu_ht:high_school_statistics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4232, |
|
"effective_num_docs": 4232, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history": { |
|
"name": "arabic_mmlu_ht:high_school_us_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history": { |
|
"name": "arabic_mmlu_ht:high_school_world_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging": { |
|
"name": "arabic_mmlu_ht:human_aging", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality": { |
|
"name": "arabic_mmlu_ht:human_sexuality", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law": { |
|
"name": "arabic_mmlu_ht:international_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence": { |
|
"name": "arabic_mmlu_ht:jurisprudence", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies": { |
|
"name": "arabic_mmlu_ht:logical_fallacies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning": { |
|
"name": "arabic_mmlu_ht:machine_learning", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management": { |
|
"name": "arabic_mmlu_ht:management", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing": { |
|
"name": "arabic_mmlu_ht:marketing", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics": { |
|
"name": "arabic_mmlu_ht:medical_genetics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous": { |
|
"name": "arabic_mmlu_ht:miscellaneous", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2420, |
|
"effective_num_docs": 2420, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes": { |
|
"name": "arabic_mmlu_ht:moral_disputes", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios": { |
|
"name": "arabic_mmlu_ht:moral_scenarios", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition": { |
|
"name": "arabic_mmlu_ht:nutrition", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy": { |
|
"name": "arabic_mmlu_ht:philosophy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory": { |
|
"name": "arabic_mmlu_ht:prehistory", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting": { |
|
"name": "arabic_mmlu_ht:professional_accounting", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4514, |
|
"effective_num_docs": 4514, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law": { |
|
"name": "arabic_mmlu_ht:professional_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7987, |
|
"effective_num_docs": 7987, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine": { |
|
"name": "arabic_mmlu_ht:professional_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1637, |
|
"effective_num_docs": 1637, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology": { |
|
"name": "arabic_mmlu_ht:professional_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3503, |
|
"effective_num_docs": 3503, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations": { |
|
"name": "arabic_mmlu_ht:public_relations", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies": { |
|
"name": "arabic_mmlu_ht:security_studies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology": { |
|
"name": "arabic_mmlu_ht:sociology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy": { |
|
"name": "arabic_mmlu_ht:us_foreign_policy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology": { |
|
"name": "arabic_mmlu_ht:virology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions": { |
|
"name": "arabic_mmlu_ht:world_religions", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Ethics": { |
|
"name": "aratrust:Ethics", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 60, |
|
"effective_num_docs": 60, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Illegal": { |
|
"name": "aratrust:Illegal", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Illegal", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 53, |
|
"effective_num_docs": 53, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:MentalHealth": { |
|
"name": "aratrust:MentalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "MentalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 76, |
|
"effective_num_docs": 76, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Offensive": { |
|
"name": "aratrust:Offensive", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Offensive", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 69, |
|
"effective_num_docs": 69, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth": { |
|
"name": "aratrust:PhysicalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "PhysicalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 73, |
|
"effective_num_docs": 73, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Privacy": { |
|
"name": "aratrust:Privacy", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Privacy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Trustfulness": { |
|
"name": "aratrust:Trustfulness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Trustfulness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 78, |
|
"effective_num_docs": 78, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Unfairness": { |
|
"name": "aratrust:Unfairness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Unfairness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 55, |
|
"effective_num_docs": 55, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)": { |
|
"name": "madinah_qa:Arabic Language (General)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)": { |
|
"name": "madinah_qa:Arabic Language (Grammar)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alrage_qa": { |
|
"name": "alrage_qa", |
|
"prompt_function": "qa_prompt_arabic", |
|
"hf_repo": "OALL/ALRAGE", |
|
"hf_subset": null, |
|
"metric": [ |
|
{ |
|
"metric_name": "llm_as_judge", |
|
"higher_is_better": true, |
|
"category": "7", |
|
"use_case": "10", |
|
"sample_level_fn": "_sample_level_fn", |
|
"corpus_level_fn": "aggregate_scores" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 200, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2106, |
|
"effective_num_docs": 2106, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "9704f13d42aaac7a", |
|
"hash_input_tokens": "45d7aee46bd96e75", |
|
"hash_cont_tokens": "a4b5cde07689a241" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "a538720d0eb01c32", |
|
"hash_input_tokens": "49773aaac1894d1a", |
|
"hash_cont_tokens": "4fbb30b02cfeb5fc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "d1ce133087326eb6", |
|
"hash_input_tokens": "b3074eba2d2a6ef4", |
|
"hash_cont_tokens": "78946fb56f7c208e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "966768bc44517467", |
|
"hash_input_tokens": "35ac1a38730ff557", |
|
"hash_cont_tokens": "13c2f57cade6f606" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "08c25110d8c30287", |
|
"hash_input_tokens": "04d8aa591b0e72b6", |
|
"hash_cont_tokens": "b996ea6d28976bee" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "a2501ca9b8d6bb4c", |
|
"hash_input_tokens": "b545d0c43affb25d", |
|
"hash_cont_tokens": "bc720754176e304f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "35561e4c822a0444", |
|
"hash_input_tokens": "45baee50ddccb0b2", |
|
"hash_cont_tokens": "e924c385f778b255" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "2888687daa327271", |
|
"hash_input_tokens": "3cf154446bd85bd9", |
|
"hash_cont_tokens": "e9ddde446c261879" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17985, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "cdc4394ef0e8335c", |
|
"hash_input_tokens": "4a824b20aabf28a8", |
|
"hash_cont_tokens": "402ca55a36435f78" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5160, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "2b61044106186432", |
|
"hash_input_tokens": "b47e1166b3900a99", |
|
"hash_cont_tokens": "fca965456f1f6fd2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2148, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"hashes": { |
|
"hash_examples": "30e09697562ff9e7", |
|
"hash_full_prompts": "45bf39d98243b0d9", |
|
"hash_input_tokens": "9650e65a157b377f", |
|
"hash_cont_tokens": "722e92e0ed8eef05" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 74, |
|
"padded": 256, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "bef69fb8b3b75f28", |
|
"hash_full_prompts": "dbd441fa7521b979", |
|
"hash_input_tokens": "74faeeb06237dd8c", |
|
"hash_cont_tokens": "e0b98bf5e852b311" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2403, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "bd066a9e6a140a4b", |
|
"hash_full_prompts": "049b75042e583cfb", |
|
"hash_input_tokens": "424589e363428b96", |
|
"hash_cont_tokens": "9b45f8df4797b70a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1588, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a9c2cd9a9929292a", |
|
"hash_full_prompts": "de7ca68348f85e9e", |
|
"hash_input_tokens": "3ddcb7c7b551ffb3", |
|
"hash_cont_tokens": "6202201a407de8a6" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1525, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "2f8a77bbbd0e21ff", |
|
"hash_full_prompts": "acdd9d293ade2948", |
|
"hash_input_tokens": "4be89be3887593f9", |
|
"hash_cont_tokens": "29edad370d9b6d77" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 105, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5eed3da47822539b", |
|
"hash_full_prompts": "71253c469fb0db6e", |
|
"hash_input_tokens": "1782e48af7e67628", |
|
"hash_cont_tokens": "3da539ef53855623" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 252, |
|
"padded": 926, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "91ae6d22a0f0213d", |
|
"hash_full_prompts": "ec34f74a2fd3797a", |
|
"hash_input_tokens": "00d6a92fa7acb2a3", |
|
"hash_cont_tokens": "d114e74b2bc98f2b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1409, |
|
"padded": 5056, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "f27bf8791bea2bb9", |
|
"hash_full_prompts": "0dceebf88fd44571", |
|
"hash_input_tokens": "08cb4d6320183d81", |
|
"hash_cont_tokens": "528a5a2bdda00b42" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 316, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "74f5bb0098c8916f", |
|
"hash_full_prompts": "a29205ff9055be06", |
|
"hash_input_tokens": "1c270c91e08d5dcd", |
|
"hash_cont_tokens": "75d6d9fd3da64a8f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 236, |
|
"padded": 944, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a4278d7b525d46fe", |
|
"hash_full_prompts": "d8634efd7c025a18", |
|
"hash_input_tokens": "2b7630dcc0cdb78d", |
|
"hash_cont_tokens": "82a19a756de7d8bc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 261, |
|
"padded": 1006, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "0cb6c07e4b80dfd4", |
|
"hash_full_prompts": "5e39569b09904dab", |
|
"hash_input_tokens": "0a6df7f75fcfbf35", |
|
"hash_cont_tokens": "2de48c62107e1bff" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 100, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "d96fc1bc32473533", |
|
"hash_full_prompts": "6954dedbed0c3ede", |
|
"hash_input_tokens": "a890e02d56937cd8", |
|
"hash_cont_tokens": "b4b8ad2c5c96a780" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 190, |
|
"padded": 476, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "8835587e436cbaff", |
|
"hash_full_prompts": "f8558abf7d8cce4a", |
|
"hash_input_tokens": "150232fe4db71e86", |
|
"hash_cont_tokens": "916f5d493f4d8503" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 64, |
|
"padded": 255, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"hashes": { |
|
"hash_examples": "7a4c38a2c451d075", |
|
"hash_full_prompts": "d51a2e8c836b7565", |
|
"hash_input_tokens": "515c34f462227271", |
|
"hash_cont_tokens": "d6cb5e7eca4cdbb0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1211, |
|
"padded": 3685, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c04c252836601279", |
|
"hash_full_prompts": "a488ee9c2b4956f8", |
|
"hash_input_tokens": "e02014ad7ba7eefd", |
|
"hash_cont_tokens": "aed1084168b32733" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 360, |
|
"padded": 1390, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "18fba1579406b3cc", |
|
"hash_full_prompts": "5eb3a05de026facf", |
|
"hash_input_tokens": "d5ab5bc7f8c6423c", |
|
"hash_cont_tokens": "1cf14304da0991a6" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 348, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"hashes": { |
|
"hash_examples": "7c9e86fba8151562", |
|
"hash_full_prompts": "87bdfe4b3ceb2b3e", |
|
"hash_input_tokens": "6311f1f88a3b7969", |
|
"hash_cont_tokens": "6d8eefac8441ae6f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 137, |
|
"padded": 544, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "acfbe4e1f0314b85", |
|
"hash_full_prompts": "9b2ebb78e82d691d", |
|
"hash_input_tokens": "d3d7abc03d15ba5f", |
|
"hash_cont_tokens": "0bbef8c66dfb12c2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 864, |
|
"padded": 3213, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "03cd0ecf10224316", |
|
"hash_full_prompts": "b98f8b69af330d19", |
|
"hash_input_tokens": "23a0a0524155b5e8", |
|
"hash_cont_tokens": "8cf21e849e75a6b6" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 172, |
|
"padded": 628, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "c3ee30196e05e122", |
|
"hash_full_prompts": "a6a7fb6b2ae0fac7", |
|
"hash_input_tokens": "06e29f4f1d1d4ca9", |
|
"hash_cont_tokens": "8dd7837f42ce56c1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 162, |
|
"padded": 637, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "e2e329d2bdd9fb7b", |
|
"hash_full_prompts": "cdb46f2e3620daed", |
|
"hash_input_tokens": "d6cbef7f55e42320", |
|
"hash_cont_tokens": "b91a848eb3775a72" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1038, |
|
"padded": 4116, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "420b161444291989", |
|
"hash_full_prompts": "9ac0a30991442545", |
|
"hash_input_tokens": "33cc8ac1e6c854fb", |
|
"hash_cont_tokens": "0f1ee943a5ab6032" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 975, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5bc5ca48a4210899", |
|
"hash_full_prompts": "ba4ae259d969a834", |
|
"hash_input_tokens": "34fdddda63a3fb1d", |
|
"hash_cont_tokens": "cc16316a5a4d2fd7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 216, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c7cc37f29311bea1", |
|
"hash_full_prompts": "96818ffea465df48", |
|
"hash_input_tokens": "83da5de6b90c3da2", |
|
"hash_cont_tokens": "dbfe068cd109683f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 760, |
|
"padded": 2962, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "5b9f1973337153a2", |
|
"hash_full_prompts": "031e244d16605fcf", |
|
"hash_input_tokens": "065ec7793dbecdf9", |
|
"hash_cont_tokens": "6b9ee60d0401aa4a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 746, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "af2469847007c1fe", |
|
"hash_full_prompts": "342008f851de3435", |
|
"hash_input_tokens": "6ee32bb86987f0c0", |
|
"hash_cont_tokens": "17cf0e1bf91880c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"hashes": { |
|
"hash_examples": "c8da9b2f16a5ea0f", |
|
"hash_full_prompts": "642ca9ca1c0a45fc", |
|
"hash_input_tokens": "416aa2f358c87413", |
|
"hash_cont_tokens": "3287e41b63d8d4c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 639, |
|
"padded": 2529, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "efb11bc8ef398117", |
|
"hash_full_prompts": "ba987a5add7fced1", |
|
"hash_input_tokens": "e3269b3dc28b0f00", |
|
"hash_cont_tokens": "773414d1333a19bb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 334, |
|
"padded": 1285, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "9e33ab030eebdb99", |
|
"hash_full_prompts": "cbc50d5996b4c1d6", |
|
"hash_input_tokens": "dc9701e7c6c45e1e", |
|
"hash_cont_tokens": "c26f7323d0a2a9a8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 883, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "4167565d878b20eb", |
|
"hash_full_prompts": "076a99b663e16c05", |
|
"hash_input_tokens": "aea1258db80dd7d0", |
|
"hash_cont_tokens": "5d0c2ae73a8231ab" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 999, |
|
"padded": 3024, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"hashes": { |
|
"hash_examples": "e77f52c8fe4352b3", |
|
"hash_full_prompts": "9e01e202d1eeb336", |
|
"hash_input_tokens": "84928c671c43ee4c", |
|
"hash_cont_tokens": "1d436b61561818b3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 314, |
|
"padded": 1232, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"hashes": { |
|
"hash_examples": "09682649b04b7327", |
|
"hash_full_prompts": "2e00cd767bdb55bf", |
|
"hash_input_tokens": "5699060ef1650763", |
|
"hash_cont_tokens": "1981e98962781f9a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 200, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "edb027bfae7e76f1", |
|
"hash_full_prompts": "b74cb9e6b696588e", |
|
"hash_input_tokens": "4234385ab0d8d223", |
|
"hash_cont_tokens": "d13ff3a1a2c6f760" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 409, |
|
"padded": 1296, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "96e72c9094c2364c", |
|
"hash_full_prompts": "be5a4f4aeeef3531", |
|
"hash_input_tokens": "149708d4c97c2234", |
|
"hash_cont_tokens": "e6b0b16caa92a2bb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 242, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "69e35bad3dec5a4d", |
|
"hash_full_prompts": "9e456bd1414463dc", |
|
"hash_input_tokens": "af31e705aabea83f", |
|
"hash_cont_tokens": "37e9dbe21639fa2a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 336, |
|
"padded": 1228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "dc6ebd484a02fca5", |
|
"hash_full_prompts": "cf0f19e1926c86aa", |
|
"hash_input_tokens": "e67146ddcc0aa1c8", |
|
"hash_cont_tokens": "0cf456155ad8748c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 39, |
|
"padded": 156, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "58a1722472c9e644", |
|
"hash_full_prompts": "530aecb4916970f4", |
|
"hash_input_tokens": "c1f8ffaaefbbe425", |
|
"hash_cont_tokens": "da8c90d9a7e0fe06" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 1020, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "07a4ed6aabbdfd1e", |
|
"hash_full_prompts": "0d2bf7638e4bf5f0", |
|
"hash_input_tokens": "365952e90ee495bf", |
|
"hash_cont_tokens": "265a627fab96c32a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 210, |
|
"padded": 710, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "8ca955902f304664", |
|
"hash_full_prompts": "64a6778a67cf389b", |
|
"hash_input_tokens": "c910fd7a60c94fa9", |
|
"hash_cont_tokens": "5660af9aaa36f96f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 241, |
|
"padded": 929, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "934025ab3738123c", |
|
"hash_full_prompts": "e93560547a1d5fe6", |
|
"hash_input_tokens": "79e8fe842f004922", |
|
"hash_cont_tokens": "244f555886b69fee" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 705, |
|
"padded": 2043, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "0b557911f2f6d919", |
|
"hash_full_prompts": "1c39fdf0d0201e1d", |
|
"hash_input_tokens": "8bf0cbb2bf4048d7", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "a552d8a0ef294061", |
|
"hash_full_prompts": "de409200c298fb8e", |
|
"hash_input_tokens": "9c3bd4733ddd9e4c", |
|
"hash_cont_tokens": "0628d49ae9f8b974" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 540, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "c4a372d0af7da098", |
|
"hash_full_prompts": "3ed0b865097d27bd", |
|
"hash_input_tokens": "efb84d67f86d0533", |
|
"hash_cont_tokens": "b7a58f27972beb74" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 608, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "9f71d816abf8af7a", |
|
"hash_full_prompts": "2a428dfb65a7adb8", |
|
"hash_input_tokens": "163c43cca056ef9d", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "38303cd765589ef3", |
|
"hash_full_prompts": "931daea7c35de35f", |
|
"hash_input_tokens": "ba2c54c9466140b4", |
|
"hash_cont_tokens": "82a0038e37a8fc52" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1060, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "dbd9b5d318e60b04", |
|
"hash_full_prompts": "fe83fd114ffbbb23", |
|
"hash_input_tokens": "7170efa52aa48343", |
|
"hash_cont_tokens": "23018926e60118b5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 576, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "6f88491d03db8a4c", |
|
"hash_full_prompts": "d723c4df922b9707", |
|
"hash_input_tokens": "52cd3cfea3d64c49", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "ebfdee5ef2ed5e17", |
|
"hash_full_prompts": "141858b09e48f16f", |
|
"hash_input_tokens": "a2ae09fec2e63454", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "e3f22cd7712aae2f", |
|
"hash_full_prompts": "01802886c1a6d9c3", |
|
"hash_input_tokens": "950b907f38298ec7", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "51a5501373afb5a7", |
|
"hash_full_prompts": "791583ca8bc5f0f2", |
|
"hash_input_tokens": "cdb8f6d950b2df05", |
|
"hash_cont_tokens": "9b1635576d7acead" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 688, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "2d3e015989b108db", |
|
"hash_full_prompts": "3b5344c5f816febb", |
|
"hash_input_tokens": "68f2674c4027af64", |
|
"hash_cont_tokens": "5ffe9183a1fcf79c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 408, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "f8810eddc38dfee4", |
|
"hash_full_prompts": "d0b2ed212b6463e5", |
|
"hash_input_tokens": "7b21805a097cdc8e", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "211e32cc43c6b1dc", |
|
"hash_full_prompts": "bbfd138996426d2e", |
|
"hash_input_tokens": "c671f7b05cea6992", |
|
"hash_cont_tokens": "e45443c0f50960b4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 940, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "810023786b2484d2", |
|
"hash_full_prompts": "4a235de1e0158122", |
|
"hash_input_tokens": "9d46c22f68c46387", |
|
"hash_cont_tokens": "f91d634b14c5e1c2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 456, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "a222760c93eaa1ee", |
|
"hash_full_prompts": "0f58731a48940799", |
|
"hash_input_tokens": "51e6a9f4df889a56", |
|
"hash_cont_tokens": "9bd5af701df11a58" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "4c069aeee64dc227", |
|
"hash_full_prompts": "6c05cc135fb6ea9d", |
|
"hash_input_tokens": "b701834e799185ab", |
|
"hash_cont_tokens": "19dec52aa573ebe3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1512, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "3cb0ccbf8e8a77ae", |
|
"hash_full_prompts": "5771866ad3b13bd8", |
|
"hash_input_tokens": "07aef87ef8647127", |
|
"hash_cont_tokens": "966dddf65896abf7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 504, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "c1d039e64ea321b9", |
|
"hash_full_prompts": "2fa80f995b88b751", |
|
"hash_input_tokens": "0041369fdb7a54e3", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "ddcb8237bb4ba08a", |
|
"hash_full_prompts": "a3204667f3a2c9ea", |
|
"hash_input_tokens": "02da53a657257eec", |
|
"hash_cont_tokens": "c95c810b6bed184c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3813, |
|
"padded": 15218, |
|
"non_padded": 34, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "07061b55c5c436d9", |
|
"hash_full_prompts": "3c9e21d36f9be2a3", |
|
"hash_input_tokens": "9b808c3bfc1377a8", |
|
"hash_cont_tokens": "586db2a947a768e8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4016, |
|
"padded": 15938, |
|
"non_padded": 126, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "8d3405483d5fdcff", |
|
"hash_full_prompts": "64110a10c017b881", |
|
"hash_input_tokens": "1c0faf1f96601749", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "031c49a430356414", |
|
"hash_full_prompts": "c2139f0680612afe", |
|
"hash_input_tokens": "0ce0c37544bfa7ad", |
|
"hash_cont_tokens": "ea5fcf32fb65faef" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 8152, |
|
"padded": 32404, |
|
"non_padded": 204, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "d0ce2b019a66c1de", |
|
"hash_full_prompts": "48bdc3c231e0c2bf", |
|
"hash_input_tokens": "76c85fc0f2642572", |
|
"hash_cont_tokens": "92409f576406eb4b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 788, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "7d7c6d476d0576b1", |
|
"hash_full_prompts": "f3ef6e471b9b957c", |
|
"hash_input_tokens": "87793a1c8d6b7485", |
|
"hash_cont_tokens": "305fb0ae0418294b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 772, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "694d3a01c6144ddb", |
|
"hash_full_prompts": "6e0a0ce2d46241bf", |
|
"hash_input_tokens": "cc9e54ca9297fb40", |
|
"hash_cont_tokens": "8892ec4dc85a392c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2891, |
|
"padded": 11343, |
|
"non_padded": 221, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "004f9c0a40b5ec10", |
|
"hash_full_prompts": "97812804456776da", |
|
"hash_input_tokens": "2282be24e6fada8c", |
|
"hash_cont_tokens": "f856e2d9fc6cfdfc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1048, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "80cf03d462e6ccbc", |
|
"hash_full_prompts": "c684dfee0f9c39d5", |
|
"hash_input_tokens": "ec90f66d3bd40634", |
|
"hash_cont_tokens": "c5820c1aeb211d23" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 936, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "92218def5b383845", |
|
"hash_full_prompts": "f380730d633dba83", |
|
"hash_input_tokens": "a7f48e9de2e291f8", |
|
"hash_cont_tokens": "fb84853294b2b0d0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 604, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "323f7848fee32e58", |
|
"hash_full_prompts": "5eb364d5fad780dd", |
|
"hash_input_tokens": "e124889640c5c9cb", |
|
"hash_cont_tokens": "b558e30df7a1239f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2120, |
|
"non_padded": 60, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "d7bbe0d037cf31ec", |
|
"hash_full_prompts": "aad87774256ebb3d", |
|
"hash_input_tokens": "bf689dbd92cb4d42", |
|
"hash_cont_tokens": "000017053f382976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4232, |
|
"padded": 16536, |
|
"non_padded": 392, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "722ec9207e3b0e04", |
|
"hash_full_prompts": "bc91172a4ec55638", |
|
"hash_input_tokens": "a50c5b3ea58f147d", |
|
"hash_cont_tokens": "bcacce95ad24ca35" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "b5eb675d3b578584", |
|
"hash_full_prompts": "dbb1e368f510e61a", |
|
"hash_input_tokens": "5f86441cf8809866", |
|
"hash_cont_tokens": "81b85933ff75eada" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "713ac79cd2dd2d7b", |
|
"hash_full_prompts": "cb02809083b96a35", |
|
"hash_input_tokens": "58017376247be16e", |
|
"hash_cont_tokens": "defb7fa0538591e9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 856, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "47551ab4e5dcf6c5", |
|
"hash_full_prompts": "f1f66eb8478a4245", |
|
"hash_input_tokens": "b6767ca53032bab7", |
|
"hash_cont_tokens": "1549f2f21fc02698" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 508, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "da360336943398d5", |
|
"hash_full_prompts": "bd695703d94fc7f2", |
|
"hash_input_tokens": "58b5fffbcd92ca46", |
|
"hash_cont_tokens": "c1adbcb36434e9cc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 476, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "661d161a486fb035", |
|
"hash_full_prompts": "657730e2a40ef9d0", |
|
"hash_input_tokens": "a0c917051f736b8d", |
|
"hash_cont_tokens": "78f6791d5b651593" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 428, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "5c3926384758bda7", |
|
"hash_full_prompts": "e6cdc016f7b62db7", |
|
"hash_input_tokens": "b6f370ad45598cd8", |
|
"hash_cont_tokens": "52f71f5e13bdb440" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 632, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "3ce756e6a22ffc48", |
|
"hash_full_prompts": "a7b776afddeb65ae", |
|
"hash_input_tokens": "2e4e4df491a9885d", |
|
"hash_cont_tokens": "e8f373ce528c59e0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 436, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"hashes": { |
|
"hash_examples": "20fe769bb3276832", |
|
"hash_full_prompts": "83c3c53407e202ac", |
|
"hash_input_tokens": "30b6f4682bb95f6e", |
|
"hash_cont_tokens": "f579c92ec22a5e4f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 364, |
|
"non_padded": 48, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "6b19449559d987ce", |
|
"hash_full_prompts": "8840dfe542a90eb4", |
|
"hash_input_tokens": "52a96c8067f1cd3a", |
|
"hash_cont_tokens": "06b5e63365285b62" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 916, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "cbb0fa9df0f5435a", |
|
"hash_full_prompts": "774bf50c5959b391", |
|
"hash_input_tokens": "6c4d0505454aa1e9", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "0a4134046c23cff9", |
|
"hash_full_prompts": "cd80399d546ad186", |
|
"hash_input_tokens": "09639aca1c65c8c6", |
|
"hash_cont_tokens": "ace4e42b79881976" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2420, |
|
"padded": 9416, |
|
"non_padded": 264, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "1ac8a0967c82caa0", |
|
"hash_full_prompts": "37f5803bc1815a85", |
|
"hash_input_tokens": "63dbf604e2ac2f44", |
|
"hash_cont_tokens": "343b198ebf398086" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1364, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "2c0670188bc5a789", |
|
"hash_full_prompts": "80aaab8f1d3a540e", |
|
"hash_input_tokens": "049221db5f32996a", |
|
"hash_cont_tokens": "486782219e185240" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3532, |
|
"non_padded": 48, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "658628c0dcdfe201", |
|
"hash_full_prompts": "6add405f8ca59370", |
|
"hash_input_tokens": "f68397c8ad257764", |
|
"hash_cont_tokens": "e6b09983adb6beb8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1200, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "8b6707b322affafd", |
|
"hash_full_prompts": "408ba4f9c8517dbd", |
|
"hash_input_tokens": "67f48310278b5902", |
|
"hash_cont_tokens": "c62cb6bed228b362" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1188, |
|
"non_padded": 56, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "0c85ffcdc9a7b367", |
|
"hash_full_prompts": "476a58719b8eff04", |
|
"hash_input_tokens": "bdfbe0545abb06a9", |
|
"hash_cont_tokens": "6d4b228157c20260" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1256, |
|
"non_padded": 40, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "cce1ea2d5f544b2f", |
|
"hash_full_prompts": "d447edaeb450c452", |
|
"hash_input_tokens": "6ffd2f9ea8b92e98", |
|
"hash_cont_tokens": "7a6e201618ba8900" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4514, |
|
"padded": 17656, |
|
"non_padded": 400, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "1c654b024b54eb4b", |
|
"hash_full_prompts": "e95c4e48604b38bd", |
|
"hash_input_tokens": "208a1783cd4eb883", |
|
"hash_cont_tokens": "7785b2f331961e0d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7987, |
|
"padded": 31516, |
|
"non_padded": 432, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "c621eaacfa662ebc", |
|
"hash_full_prompts": "fed41436922d3326", |
|
"hash_input_tokens": "4b76b00fecc17060", |
|
"hash_cont_tokens": "4d39a5a8c106cbc1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1637, |
|
"padded": 6388, |
|
"non_padded": 160, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "bc14a28eaec87dc4", |
|
"hash_full_prompts": "f4e3f8bdda5d5f8d", |
|
"hash_input_tokens": "2086988b2fcd5de1", |
|
"hash_cont_tokens": "ae3f2606ef4c8870" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3503, |
|
"padded": 13664, |
|
"non_padded": 348, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "de4989d9375885c4", |
|
"hash_full_prompts": "42e94d0ef7696a87", |
|
"hash_input_tokens": "86d9e88be3e5d539", |
|
"hash_cont_tokens": "5c28d31f06f032c1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 436, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "3f84bfeec717c6de", |
|
"hash_full_prompts": "4a51b2d3d6f41de8", |
|
"hash_input_tokens": "fdf0940c09f7823b", |
|
"hash_cont_tokens": "cf0f49982dc87860" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 976, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "10d7c2fae10bfcbc", |
|
"hash_full_prompts": "e0334b66d2581545", |
|
"hash_input_tokens": "81b8e8795d63a842", |
|
"hash_cont_tokens": "ef42dead78077738" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 788, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "bb05f02c38ddaf1a", |
|
"hash_full_prompts": "60569d889c3d7dc2", |
|
"hash_input_tokens": "604121570dfcb89c", |
|
"hash_cont_tokens": "093d8506794b6835" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 392, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"hashes": { |
|
"hash_examples": "290915a48884ede2", |
|
"hash_full_prompts": "50d8fc71d3f1c455", |
|
"hash_input_tokens": "78198eb2221d6cd8", |
|
"hash_cont_tokens": "2b1b1b04d8943fa0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 656, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "91cc5451c7284f75", |
|
"hash_full_prompts": "7f74aa0157e0b69a", |
|
"hash_input_tokens": "6ae13a968bd6e38d", |
|
"hash_cont_tokens": "94e5474fab6f0c97" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 664, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "25bf94d05f737b63", |
|
"hash_full_prompts": "ddd29160c14f29f6", |
|
"hash_input_tokens": "da0c51f65d6be823", |
|
"hash_cont_tokens": "3052f3111aefe0bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2392, |
|
"non_padded": 11, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "e65fe4df843f4380", |
|
"hash_full_prompts": "567e5f484271f133", |
|
"hash_input_tokens": "8dce158a8b61ad63", |
|
"hash_cont_tokens": "3f57c831d5d970fc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1577, |
|
"non_padded": 11, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"hashes": { |
|
"hash_examples": "5d32da36271c5eb4", |
|
"hash_full_prompts": "55e31dfd2c49b13a", |
|
"hash_input_tokens": "1330f773ab6da394", |
|
"hash_cont_tokens": "9aabf6c5c20a98f8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 60, |
|
"padded": 180, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"hashes": { |
|
"hash_examples": "0c07f1f100f2d0e8", |
|
"hash_full_prompts": "6ef45c67f2b68581", |
|
"hash_input_tokens": "c7112f94bd16b99f", |
|
"hash_cont_tokens": "a9c10219111fa722" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 53, |
|
"padded": 159, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "8e5fc5c4704bd96b", |
|
"hash_full_prompts": "98b7cd0de2909760", |
|
"hash_input_tokens": "8f97ce3bdc7e5b03", |
|
"hash_cont_tokens": "9d17480b8be66b0e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 76, |
|
"padded": 228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"hashes": { |
|
"hash_examples": "5ad4369b7dc5de46", |
|
"hash_full_prompts": "235e996d0ca6570f", |
|
"hash_input_tokens": "77717f8eaaee027b", |
|
"hash_cont_tokens": "c2435af60a8a3ec9" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 69, |
|
"padded": 207, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "dc2a632e2dcc86db", |
|
"hash_full_prompts": "f7db2869fbf05e5b", |
|
"hash_input_tokens": "314dae158b573d39", |
|
"hash_cont_tokens": "c69678795d83ab16" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 73, |
|
"padded": 218, |
|
"non_padded": 1, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"hashes": { |
|
"hash_examples": "295e35448a39e003", |
|
"hash_full_prompts": "06b3fd8346033a04", |
|
"hash_input_tokens": "869640a7d3d78af2", |
|
"hash_cont_tokens": "efbcab1627e2e5df" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 157, |
|
"non_padded": 14, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"hashes": { |
|
"hash_examples": "e79ac1ea5439e623", |
|
"hash_full_prompts": "4a0823dfb6363ed5", |
|
"hash_input_tokens": "1c6e414bccbf6776", |
|
"hash_cont_tokens": "2d3ddcbabe9a8f0e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 78, |
|
"padded": 234, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"hashes": { |
|
"hash_examples": "4ac5dccbfbdc5077", |
|
"hash_full_prompts": "1ad2169e9d397275", |
|
"hash_input_tokens": "929d0e3aa13c9fdf", |
|
"hash_cont_tokens": "59878db838346c14" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 55, |
|
"padded": 144, |
|
"non_padded": 21, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alrage_qa|0": { |
|
"hashes": { |
|
"hash_examples": "3edbbe22cabd4160", |
|
"hash_full_prompts": "59ca8a54bd011dfa", |
|
"hash_input_tokens": "d2d26b05824021f2", |
|
"hash_cont_tokens": "d929d18d54adeebd" |
|
}, |
|
"truncated": 2106, |
|
"non_truncated": 0, |
|
"padded": 2106, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "b8b3b49631adcc40", |
|
"hash_full_prompts": "679923b9410bb0b8", |
|
"hash_input_tokens": "5190c54918afaac9", |
|
"hash_cont_tokens": "a9eedebe12cb2965" |
|
}, |
|
"truncated": 105, |
|
"non_truncated": 91757, |
|
"padded": 333673, |
|
"non_padded": 3211, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |