|
{ |
|
"config_general": { |
|
"lighteval_sha": "?", |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null, |
|
"job_id": 0, |
|
"start_time": 698.764930847, |
|
"end_time": 12137.222919043, |
|
"total_evaluation_time_secondes": "11438.457988196002", |
|
"model_name": "v000000/Qwen2.5-14B-Gutenberg-1e-Delta", |
|
"model_sha": "5bf7cca8eacba3ef91c8271790fd5821d26c5d7d", |
|
"model_dtype": "torch.bfloat16", |
|
"model_size": "27.51 GB" |
|
}, |
|
"results": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"acc_norm": 0.3752244165170557, |
|
"acc_norm_stderr": 0.02053384001863653 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"acc_norm": 0.3862835959221501, |
|
"acc_norm_stderr": 0.006629511489448706 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"acc_norm": 0.40893854748603353, |
|
"acc_norm_stderr": 0.01644283065471554 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"acc_norm": 0.52, |
|
"acc_norm_stderr": 0.05807730170189531 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"acc_norm": 0.5866666666666667, |
|
"acc_norm_stderr": 0.040341569222180455 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"acc_norm": 0.4666666666666667, |
|
"acc_norm_stderr": 0.04087046889188089 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"acc_norm": 0.8213883677298311, |
|
"acc_norm_stderr": 0.004283976421092546 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"acc_norm": 0.5658048373644704, |
|
"acc_norm_stderr": 0.006402026228969449 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"acc_norm": 0.4145348837209302, |
|
"acc_norm_stderr": 0.011882097588646086 |
|
}, |
|
"community|arabic_exams|0": { |
|
"acc_norm": 0.5344506517690876, |
|
"acc_norm_stderr": 0.0215453856520788 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"acc_norm": 0.5, |
|
"acc_norm_stderr": 0.058520573598065284 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"acc_norm": 0.4166666666666667, |
|
"acc_norm_stderr": 0.01994491413687358 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.2876712328767123, |
|
"acc_norm_stderr": 0.023726723391354485 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"acc_norm": 0.30256410256410254, |
|
"acc_norm_stderr": 0.023290888053772746 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"acc_norm": 0.7037037037037037, |
|
"acc_norm_stderr": 0.0895511888632576 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"acc_norm": 0.6349206349206349, |
|
"acc_norm_stderr": 0.030389006601099094 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"acc_norm": 0.4364797728885735, |
|
"acc_norm_stderr": 0.013217079083833856 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"acc_norm": 0.4367816091954023, |
|
"acc_norm_stderr": 0.05348368965287097 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"acc_norm": 0.4576271186440678, |
|
"acc_norm_stderr": 0.03249906963811369 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"acc_norm": 0.5900383141762452, |
|
"acc_norm_stderr": 0.03050177182623356 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"acc_norm": 0.9259259259259259, |
|
"acc_norm_stderr": 0.051361129280113826 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"acc_norm": 0.7368421052631579, |
|
"acc_norm_stderr": 0.0320305589184308 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"acc_norm": 0.734375, |
|
"acc_norm_stderr": 0.05564461487585784 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"acc_norm": 0.6581337737407101, |
|
"acc_norm_stderr": 0.013636173016926002 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"acc_norm": 0.5944444444444444, |
|
"acc_norm_stderr": 0.025913948390929215 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"acc_norm": 0.7241379310344828, |
|
"acc_norm_stderr": 0.04819560289115228 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"acc_norm": 0.5036496350364964, |
|
"acc_norm_stderr": 0.04287350410390778 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"acc_norm": 0.6168981481481481, |
|
"acc_norm_stderr": 0.01654849339935715 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"acc_norm": 0.7093023255813954, |
|
"acc_norm_stderr": 0.03472469304477599 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"acc_norm": 0.7037037037037037, |
|
"acc_norm_stderr": 0.03598695425263619 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"acc_norm": 0.5635838150289018, |
|
"acc_norm_stderr": 0.015400694372076511 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"acc_norm": 0.7058823529411765, |
|
"acc_norm_stderr": 0.027678468642144703 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"acc_norm": 0.6666666666666666, |
|
"acc_norm_stderr": 0.0629940788348712 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"acc_norm": 0.4473684210526316, |
|
"acc_norm_stderr": 0.018048022490206216 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"acc_norm": 0.6551724137931034, |
|
"acc_norm_stderr": 0.033442837442804574 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"acc_norm": 0.6176470588235294, |
|
"acc_norm_stderr": 0.04835503696107224 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"acc_norm": 0.5242566510172144, |
|
"acc_norm_stderr": 0.019771881449482668 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"acc_norm": 0.6467065868263473, |
|
"acc_norm_stderr": 0.026193847556154944 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"acc_norm": 0.6260504201680672, |
|
"acc_norm_stderr": 0.031429466378837076 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"acc_norm": 0.7387387387387387, |
|
"acc_norm_stderr": 0.013906494735089352 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"acc_norm": 0.5254777070063694, |
|
"acc_norm_stderr": 0.028224957032864047 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"acc_norm": 0.68, |
|
"acc_norm_stderr": 0.05422675115236519 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"acc_norm": 0.6577017114914425, |
|
"acc_norm_stderr": 0.02349020291153678 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"acc_norm": 0.7479338842975206, |
|
"acc_norm_stderr": 0.02796921759454876 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"acc_norm": 0.8422619047619048, |
|
"acc_norm_stderr": 0.01991450296903593 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"acc_norm": 0.6410256410256411, |
|
"acc_norm_stderr": 0.07781756136754926 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"acc_norm": 0.4666666666666667, |
|
"acc_norm_stderr": 0.03130299530892506 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"acc_norm": 0.6142857142857143, |
|
"acc_norm_stderr": 0.03367014035794127 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"acc_norm": 0.5269709543568465, |
|
"acc_norm_stderr": 0.03222787145346665 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"acc_norm": 0.7730496453900709, |
|
"acc_norm_stderr": 0.015786388841644444 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"acc_norm": 0.29, |
|
"acc_norm_stderr": 0.045604802157206845 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"acc_norm": 0.4222222222222222, |
|
"acc_norm_stderr": 0.042667634040995814 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"acc_norm": 0.7171052631578947, |
|
"acc_norm_stderr": 0.03665349695640767 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"acc_norm": 0.69, |
|
"acc_norm_stderr": 0.04648231987117316 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"acc_norm": 0.5886792452830188, |
|
"acc_norm_stderr": 0.03028500925900979 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"acc_norm": 0.5486111111111112, |
|
"acc_norm_stderr": 0.04161402398403279 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"acc_norm": 0.39, |
|
"acc_norm_stderr": 0.04902071300001975 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"acc_norm": 0.45, |
|
"acc_norm_stderr": 0.04999999999999999 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"acc_norm": 0.25, |
|
"acc_norm_stderr": 0.04351941398892446 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"acc_norm": 0.5491329479768786, |
|
"acc_norm_stderr": 0.03794012674697031 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"acc_norm": 0.4019607843137255, |
|
"acc_norm_stderr": 0.04878608714466996 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"acc_norm": 0.6, |
|
"acc_norm_stderr": 0.049236596391733084 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"acc_norm": 0.5829787234042553, |
|
"acc_norm_stderr": 0.032232762667117124 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"acc_norm": 0.39473684210526316, |
|
"acc_norm_stderr": 0.04598188057816542 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"acc_norm": 0.47586206896551725, |
|
"acc_norm_stderr": 0.041618085035015295 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"acc_norm": 0.4444444444444444, |
|
"acc_norm_stderr": 0.025591857761382186 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"acc_norm": 0.373015873015873, |
|
"acc_norm_stderr": 0.04325506042017086 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"acc_norm": 0.36, |
|
"acc_norm_stderr": 0.048241815132442176 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"acc_norm": 0.6286388670338316, |
|
"acc_norm_stderr": 0.00782568553645601 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"acc_norm": 0.6195219123505976, |
|
"acc_norm_stderr": 0.007662145349016723 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"acc_norm": 0.62, |
|
"acc_norm_stderr": 0.048783173121456316 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"acc_norm": 0.5087095191364083, |
|
"acc_norm_stderr": 0.005537307738765534 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"acc_norm": 0.7323232323232324, |
|
"acc_norm_stderr": 0.03154449888270285 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"acc_norm": 0.7253886010362695, |
|
"acc_norm_stderr": 0.03221024508041153 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"acc_norm": 0.6537530266343826, |
|
"acc_norm_stderr": 0.008850156036408721 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"acc_norm": 0.25555555555555554, |
|
"acc_norm_stderr": 0.02659393910184407 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"acc_norm": 0.6470588235294118, |
|
"acc_norm_stderr": 0.03104194130405927 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"acc_norm": 0.33112582781456956, |
|
"acc_norm_stderr": 0.038425817186598696 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"acc_norm": 0.7027522935779816, |
|
"acc_norm_stderr": 0.01959570722464353 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"acc_norm": 0.6122400756143668, |
|
"acc_norm_stderr": 0.007490672769886293 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"acc_norm": 0.6470588235294118, |
|
"acc_norm_stderr": 0.03354092437591521 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"acc_norm": 0.7215189873417721, |
|
"acc_norm_stderr": 0.029178682304842534 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"acc_norm": 0.5560538116591929, |
|
"acc_norm_stderr": 0.033346256742427284 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"acc_norm": 0.6183206106870229, |
|
"acc_norm_stderr": 0.042607351576445594 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"acc_norm": 0.71900826446281, |
|
"acc_norm_stderr": 0.04103203830514511 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"acc_norm": 0.6296296296296297, |
|
"acc_norm_stderr": 0.04668408033024931 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"acc_norm": 0.50920245398773, |
|
"acc_norm_stderr": 0.03927705600787443 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"acc_norm": 0.44642857142857145, |
|
"acc_norm_stderr": 0.047184714852195886 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"acc_norm": 0.6990291262135923, |
|
"acc_norm_stderr": 0.04541609446503949 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"acc_norm": 0.7051282051282052, |
|
"acc_norm_stderr": 0.029872577708891176 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"acc_norm": 0.53, |
|
"acc_norm_stderr": 0.05016135580465919 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"acc_norm": 0.6685950413223141, |
|
"acc_norm_stderr": 0.009570687646468302 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"acc_norm": 0.5520231213872833, |
|
"acc_norm_stderr": 0.02677299065336182 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"acc_norm": 0.4245810055865922, |
|
"acc_norm_stderr": 0.01653117099327888 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"acc_norm": 0.6503267973856209, |
|
"acc_norm_stderr": 0.0273053080762747 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"acc_norm": 0.5498392282958199, |
|
"acc_norm_stderr": 0.02825666072336018 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"acc_norm": 0.5493827160493827, |
|
"acc_norm_stderr": 0.0276847214156562 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"acc_norm": 0.5979175897208684, |
|
"acc_norm_stderr": 0.007298700375485728 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"acc_norm": 0.5059471641417304, |
|
"acc_norm_stderr": 0.005594671976555517 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"acc_norm": 0.659132559560171, |
|
"acc_norm_stderr": 0.011718917388055045 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"acc_norm": 0.6251784184984299, |
|
"acc_norm_stderr": 0.008180054784128513 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"acc_norm": 0.6090909090909091, |
|
"acc_norm_stderr": 0.04673752333670239 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"acc_norm": 0.6857142857142857, |
|
"acc_norm_stderr": 0.029719329422417465 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"acc_norm": 0.7164179104477612, |
|
"acc_norm_stderr": 0.031871875379197966 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"acc_norm": 0.74, |
|
"acc_norm_stderr": 0.0440844002276808 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"acc_norm": 0.46987951807228917, |
|
"acc_norm_stderr": 0.03885425420866766 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"acc_norm": 0.6140350877192983, |
|
"acc_norm_stderr": 0.03733756969066165 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"acc_norm": 0.4068627450980392, |
|
"acc_norm_stderr": 0.019873802005061173 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"acc_norm": 0.37534246575342467, |
|
"acc_norm_stderr": 0.02537956363744994 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"f1": 0.0, |
|
"f1_stderr": 0.0 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"f1": 0.018867924528301886, |
|
"f1_stderr": 0.01886792452830187 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"f1": 0.013157894736842105, |
|
"f1_stderr": 0.01315789473684212 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"f1": 0.10144927536231885, |
|
"f1_stderr": 0.036613506004249646 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"f1": 0.0, |
|
"f1_stderr": 0.0 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"f1": 0.0, |
|
"f1_stderr": 0.0 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"f1": 0.01282051282051282, |
|
"f1_stderr": 0.01282051282051282 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"f1": 0.09090909090909091, |
|
"f1_stderr": 0.03912104390108501 |
|
}, |
|
"community|alghafa:_average|0": { |
|
"acc_norm": 0.5050564424526449, |
|
"acc_norm_stderr": 0.02282929135749617 |
|
}, |
|
"community|arabic_mmlu:_average|0": { |
|
"acc_norm": 0.6085328275713281, |
|
"acc_norm_stderr": 0.034347299871804474 |
|
}, |
|
"community|arabic_mmlu_ht:_average|0": { |
|
"acc_norm": 0.5607939841696052, |
|
"acc_norm_stderr": 0.03263356040770743 |
|
}, |
|
"community|madinah_qa:_average|0": { |
|
"acc_norm": 0.39110260542573194, |
|
"acc_norm_stderr": 0.02262668282125556 |
|
}, |
|
"community|aratrust:_average|0": { |
|
"f1": 0.02965058729463332, |
|
"f1_stderr": 0.015072610248873932 |
|
}, |
|
"all": { |
|
"acc_norm": 0.5703553582129814, |
|
"acc_norm_stderr": 0.03216759001489503, |
|
"f1": 0.02965058729463332, |
|
"f1_stderr": 0.015072610248873932 |
|
}, |
|
"community|alrage_qa|0": { |
|
"llm_as_judge": 0.7867521367521287, |
|
"llm_as_judge_stderr": 8.653782403835487e-05 |
|
} |
|
}, |
|
"versions": { |
|
"community|alghafa:mcq_exams_test_ar|0": 0, |
|
"community|alghafa:meta_ar_dialects|0": 0, |
|
"community|alghafa:meta_ar_msa|0": 0, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, |
|
"community|alghafa:multiple_choice_sentiment_task|0": 0, |
|
"community|arabic_exams|0": 0, |
|
"community|arabic_mmlu:Accounting (University)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (General)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0, |
|
"community|arabic_mmlu:Biology (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (High School)|0": 0, |
|
"community|arabic_mmlu:Civics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (High School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Computer Science (University)|0": 0, |
|
"community|arabic_mmlu:Driving Test|0": 0, |
|
"community|arabic_mmlu:Economics (High School)|0": 0, |
|
"community|arabic_mmlu:Economics (Middle School)|0": 0, |
|
"community|arabic_mmlu:Economics (University)|0": 0, |
|
"community|arabic_mmlu:General Knowledge|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0, |
|
"community|arabic_mmlu:Geography (High School)|0": 0, |
|
"community|arabic_mmlu:Geography (Middle School)|0": 0, |
|
"community|arabic_mmlu:Geography (Primary School)|0": 0, |
|
"community|arabic_mmlu:History (High School)|0": 0, |
|
"community|arabic_mmlu:History (Middle School)|0": 0, |
|
"community|arabic_mmlu:History (Primary School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0, |
|
"community|arabic_mmlu:Law (Professional)|0": 0, |
|
"community|arabic_mmlu:Management (University)|0": 0, |
|
"community|arabic_mmlu:Math (Primary School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": 0, |
|
"community|arabic_mmlu:Philosophy (High School)|0": 0, |
|
"community|arabic_mmlu:Physics (High School)|0": 0, |
|
"community|arabic_mmlu:Political Science (University)|0": 0, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": 0, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": 0, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": 0, |
|
"community|arabic_mmlu_ht:anatomy|0": 0, |
|
"community|arabic_mmlu_ht:astronomy|0": 0, |
|
"community|arabic_mmlu_ht:business_ethics|0": 0, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": 0, |
|
"community|arabic_mmlu_ht:college_biology|0": 0, |
|
"community|arabic_mmlu_ht:college_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:college_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:college_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:college_medicine|0": 0, |
|
"community|arabic_mmlu_ht:college_physics|0": 0, |
|
"community|arabic_mmlu_ht:computer_security|0": 0, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": 0, |
|
"community|arabic_mmlu_ht:econometrics|0": 0, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": 0, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:formal_logic|0": 0, |
|
"community|arabic_mmlu_ht:global_facts|0": 0, |
|
"community|arabic_mmlu_ht:high_school_biology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": 0, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": 0, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_geography|0": 0, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_physics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": 0, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": 0, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": 0, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": 0, |
|
"community|arabic_mmlu_ht:human_aging|0": 0, |
|
"community|arabic_mmlu_ht:human_sexuality|0": 0, |
|
"community|arabic_mmlu_ht:international_law|0": 0, |
|
"community|arabic_mmlu_ht:jurisprudence|0": 0, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": 0, |
|
"community|arabic_mmlu_ht:machine_learning|0": 0, |
|
"community|arabic_mmlu_ht:management|0": 0, |
|
"community|arabic_mmlu_ht:marketing|0": 0, |
|
"community|arabic_mmlu_ht:medical_genetics|0": 0, |
|
"community|arabic_mmlu_ht:miscellaneous|0": 0, |
|
"community|arabic_mmlu_ht:moral_disputes|0": 0, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": 0, |
|
"community|arabic_mmlu_ht:nutrition|0": 0, |
|
"community|arabic_mmlu_ht:philosophy|0": 0, |
|
"community|arabic_mmlu_ht:prehistory|0": 0, |
|
"community|arabic_mmlu_ht:professional_accounting|0": 0, |
|
"community|arabic_mmlu_ht:professional_law|0": 0, |
|
"community|arabic_mmlu_ht:professional_medicine|0": 0, |
|
"community|arabic_mmlu_ht:professional_psychology|0": 0, |
|
"community|arabic_mmlu_ht:public_relations|0": 0, |
|
"community|arabic_mmlu_ht:security_studies|0": 0, |
|
"community|arabic_mmlu_ht:sociology|0": 0, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": 0, |
|
"community|arabic_mmlu_ht:virology|0": 0, |
|
"community|arabic_mmlu_ht:world_religions|0": 0, |
|
"community|aratrust:Ethics|0": 0, |
|
"community|aratrust:Illegal|0": 0, |
|
"community|aratrust:MentalHealth|0": 0, |
|
"community|aratrust:Offensive|0": 0, |
|
"community|aratrust:PhysicalHealth|0": 0, |
|
"community|aratrust:Privacy|0": 0, |
|
"community|aratrust:Trustfulness|0": 0, |
|
"community|aratrust:Unfairness|0": 0, |
|
"community|madinah_qa:Arabic Language (General)|0": 0, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": 0, |
|
"community|alrage_qa|0": 0 |
|
}, |
|
"config_tasks": { |
|
"community|alghafa:mcq_exams_test_ar": { |
|
"name": "alghafa:mcq_exams_test_ar", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "mcq_exams_test_ar", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 557, |
|
"effective_num_docs": 557, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects": { |
|
"name": "alghafa:meta_ar_dialects", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_dialects", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5395, |
|
"effective_num_docs": 5395, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa": { |
|
"name": "alghafa:meta_ar_msa", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "meta_ar_msa", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { |
|
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_soqal_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { |
|
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 150, |
|
"effective_num_docs": 150, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7995, |
|
"effective_num_docs": 7995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task": { |
|
"name": "alghafa:multiple_choice_rating_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_rating_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 5995, |
|
"effective_num_docs": 5995, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task": { |
|
"name": "alghafa:multiple_choice_sentiment_task", |
|
"prompt_function": "alghafa_pfn", |
|
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", |
|
"hf_subset": "multiple_choice_sentiment_task", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1720, |
|
"effective_num_docs": 1720, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_exams": { |
|
"name": "arabic_exams", |
|
"prompt_function": "arabic_exams_pfn", |
|
"hf_repo": "OALL/Arabic_EXAMS", |
|
"hf_subset": "default", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test", |
|
"validation" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": "validation", |
|
"few_shots_select": "sequential", |
|
"generation_size": null, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 537, |
|
"effective_num_docs": 537, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)": { |
|
"name": "arabic_mmlu:Accounting (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Accounting (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 74, |
|
"effective_num_docs": 74, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)": { |
|
"name": "arabic_mmlu:Arabic Language (General)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)": { |
|
"name": "arabic_mmlu:Arabic Language (Grammar)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)": { |
|
"name": "arabic_mmlu:Arabic Language (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 390, |
|
"effective_num_docs": 390, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)": { |
|
"name": "arabic_mmlu:Arabic Language (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)": { |
|
"name": "arabic_mmlu:Arabic Language (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Arabic Language (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 252, |
|
"effective_num_docs": 252, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)": { |
|
"name": "arabic_mmlu:Biology (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Biology (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1409, |
|
"effective_num_docs": 1409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)": { |
|
"name": "arabic_mmlu:Civics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)": { |
|
"name": "arabic_mmlu:Civics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Civics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 236, |
|
"effective_num_docs": 236, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)": { |
|
"name": "arabic_mmlu:Computer Science (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 261, |
|
"effective_num_docs": 261, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)": { |
|
"name": "arabic_mmlu:Computer Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 27, |
|
"effective_num_docs": 27, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)": { |
|
"name": "arabic_mmlu:Computer Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 190, |
|
"effective_num_docs": 190, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)": { |
|
"name": "arabic_mmlu:Computer Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Computer Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 64, |
|
"effective_num_docs": 64, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test": { |
|
"name": "arabic_mmlu:Driving Test", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Driving Test", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1211, |
|
"effective_num_docs": 1211, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)": { |
|
"name": "arabic_mmlu:Economics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 360, |
|
"effective_num_docs": 360, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)": { |
|
"name": "arabic_mmlu:Economics (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 87, |
|
"effective_num_docs": 87, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)": { |
|
"name": "arabic_mmlu:Economics (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Economics (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 137, |
|
"effective_num_docs": 137, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge": { |
|
"name": "arabic_mmlu:General Knowledge", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 864, |
|
"effective_num_docs": 864, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)": { |
|
"name": "arabic_mmlu:General Knowledge (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 172, |
|
"effective_num_docs": 172, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)": { |
|
"name": "arabic_mmlu:General Knowledge (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "General Knowledge (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 162, |
|
"effective_num_docs": 162, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)": { |
|
"name": "arabic_mmlu:Geography (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1038, |
|
"effective_num_docs": 1038, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)": { |
|
"name": "arabic_mmlu:Geography (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 272, |
|
"effective_num_docs": 272, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)": { |
|
"name": "arabic_mmlu:Geography (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Geography (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)": { |
|
"name": "arabic_mmlu:History (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 760, |
|
"effective_num_docs": 760, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)": { |
|
"name": "arabic_mmlu:History (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 203, |
|
"effective_num_docs": 203, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)": { |
|
"name": "arabic_mmlu:History (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "History (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies": { |
|
"name": "arabic_mmlu:Islamic Studies", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 639, |
|
"effective_num_docs": 639, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)": { |
|
"name": "arabic_mmlu:Islamic Studies (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 334, |
|
"effective_num_docs": 334, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 999, |
|
"effective_num_docs": 999, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)": { |
|
"name": "arabic_mmlu:Law (Professional)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Law (Professional)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 314, |
|
"effective_num_docs": 314, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)": { |
|
"name": "arabic_mmlu:Management (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Management (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)": { |
|
"name": "arabic_mmlu:Math (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Math (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 409, |
|
"effective_num_docs": 409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)": { |
|
"name": "arabic_mmlu:Natural Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 242, |
|
"effective_num_docs": 242, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)": { |
|
"name": "arabic_mmlu:Natural Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 336, |
|
"effective_num_docs": 336, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)": { |
|
"name": "arabic_mmlu:Philosophy (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Philosophy (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 39, |
|
"effective_num_docs": 39, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)": { |
|
"name": "arabic_mmlu:Physics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Physics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)": { |
|
"name": "arabic_mmlu:Political Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Political Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 210, |
|
"effective_num_docs": 210, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)": { |
|
"name": "arabic_mmlu:Social Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 241, |
|
"effective_num_docs": 241, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)": { |
|
"name": "arabic_mmlu:Social Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 705, |
|
"effective_num_docs": 705, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra": { |
|
"name": "arabic_mmlu_ht:abstract_algebra", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy": { |
|
"name": "arabic_mmlu_ht:anatomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy": { |
|
"name": "arabic_mmlu_ht:astronomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics": { |
|
"name": "arabic_mmlu_ht:business_ethics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge": { |
|
"name": "arabic_mmlu_ht:clinical_knowledge", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology": { |
|
"name": "arabic_mmlu_ht:college_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry": { |
|
"name": "arabic_mmlu_ht:college_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science": { |
|
"name": "arabic_mmlu_ht:college_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics": { |
|
"name": "arabic_mmlu_ht:college_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine": { |
|
"name": "arabic_mmlu_ht:college_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics": { |
|
"name": "arabic_mmlu_ht:college_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security": { |
|
"name": "arabic_mmlu_ht:computer_security", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics": { |
|
"name": "arabic_mmlu_ht:conceptual_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics": { |
|
"name": "arabic_mmlu_ht:econometrics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering": { |
|
"name": "arabic_mmlu_ht:electrical_engineering", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics": { |
|
"name": "arabic_mmlu_ht:elementary_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic": { |
|
"name": "arabic_mmlu_ht:formal_logic", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts": { |
|
"name": "arabic_mmlu_ht:global_facts", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology": { |
|
"name": "arabic_mmlu_ht:high_school_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3813, |
|
"effective_num_docs": 3813, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry": { |
|
"name": "arabic_mmlu_ht:high_school_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4016, |
|
"effective_num_docs": 4016, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science": { |
|
"name": "arabic_mmlu_ht:high_school_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history": { |
|
"name": "arabic_mmlu_ht:high_school_european_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 8152, |
|
"effective_num_docs": 8152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography": { |
|
"name": "arabic_mmlu_ht:high_school_geography", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics": { |
|
"name": "arabic_mmlu_ht:high_school_government_and_politics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_macroeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2891, |
|
"effective_num_docs": 2891, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics": { |
|
"name": "arabic_mmlu_ht:high_school_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_microeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics": { |
|
"name": "arabic_mmlu_ht:high_school_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology": { |
|
"name": "arabic_mmlu_ht:high_school_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics": { |
|
"name": "arabic_mmlu_ht:high_school_statistics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4232, |
|
"effective_num_docs": 4232, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history": { |
|
"name": "arabic_mmlu_ht:high_school_us_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history": { |
|
"name": "arabic_mmlu_ht:high_school_world_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging": { |
|
"name": "arabic_mmlu_ht:human_aging", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality": { |
|
"name": "arabic_mmlu_ht:human_sexuality", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law": { |
|
"name": "arabic_mmlu_ht:international_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence": { |
|
"name": "arabic_mmlu_ht:jurisprudence", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies": { |
|
"name": "arabic_mmlu_ht:logical_fallacies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning": { |
|
"name": "arabic_mmlu_ht:machine_learning", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management": { |
|
"name": "arabic_mmlu_ht:management", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing": { |
|
"name": "arabic_mmlu_ht:marketing", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics": { |
|
"name": "arabic_mmlu_ht:medical_genetics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous": { |
|
"name": "arabic_mmlu_ht:miscellaneous", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2420, |
|
"effective_num_docs": 2420, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes": { |
|
"name": "arabic_mmlu_ht:moral_disputes", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios": { |
|
"name": "arabic_mmlu_ht:moral_scenarios", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition": { |
|
"name": "arabic_mmlu_ht:nutrition", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy": { |
|
"name": "arabic_mmlu_ht:philosophy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory": { |
|
"name": "arabic_mmlu_ht:prehistory", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting": { |
|
"name": "arabic_mmlu_ht:professional_accounting", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4514, |
|
"effective_num_docs": 4514, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law": { |
|
"name": "arabic_mmlu_ht:professional_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7987, |
|
"effective_num_docs": 7987, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine": { |
|
"name": "arabic_mmlu_ht:professional_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1637, |
|
"effective_num_docs": 1637, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology": { |
|
"name": "arabic_mmlu_ht:professional_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3503, |
|
"effective_num_docs": 3503, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations": { |
|
"name": "arabic_mmlu_ht:public_relations", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies": { |
|
"name": "arabic_mmlu_ht:security_studies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology": { |
|
"name": "arabic_mmlu_ht:sociology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy": { |
|
"name": "arabic_mmlu_ht:us_foreign_policy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology": { |
|
"name": "arabic_mmlu_ht:virology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions": { |
|
"name": "arabic_mmlu_ht:world_religions", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Ethics": { |
|
"name": "aratrust:Ethics", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 60, |
|
"effective_num_docs": 60, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Illegal": { |
|
"name": "aratrust:Illegal", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Illegal", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 53, |
|
"effective_num_docs": 53, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:MentalHealth": { |
|
"name": "aratrust:MentalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "MentalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 76, |
|
"effective_num_docs": 76, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Offensive": { |
|
"name": "aratrust:Offensive", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Offensive", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 69, |
|
"effective_num_docs": 69, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth": { |
|
"name": "aratrust:PhysicalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "PhysicalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 73, |
|
"effective_num_docs": 73, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Privacy": { |
|
"name": "aratrust:Privacy", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Privacy", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Trustfulness": { |
|
"name": "aratrust:Trustfulness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Trustfulness", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 78, |
|
"effective_num_docs": 78, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Unfairness": { |
|
"name": "aratrust:Unfairness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Unfairness", |
|
"metric": [ |
|
{ |
|
"metric_name": "f1", |
|
"higher_is_better": true, |
|
"category": "3", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 55, |
|
"effective_num_docs": 55, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)": { |
|
"name": "madinah_qa:Arabic Language (General)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)": { |
|
"name": "madinah_qa:Arabic Language (Grammar)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alrage_qa": { |
|
"name": "alrage_qa", |
|
"prompt_function": "qa_prompt_arabic", |
|
"hf_repo": "OALL/ALRAGE", |
|
"hf_subset": null, |
|
"metric": [ |
|
{ |
|
"metric_name": "llm_as_judge", |
|
"higher_is_better": true, |
|
"category": "7", |
|
"use_case": "10", |
|
"sample_level_fn": "_sample_level_fn", |
|
"corpus_level_fn": "aggregate_scores" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 200, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2106, |
|
"effective_num_docs": 2106, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "f02937ba204b1ad8", |
|
"hash_input_tokens": "c5d46caae299de3a", |
|
"hash_cont_tokens": "d6519f4be64b0b3f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "697d1eedfdb9c04c", |
|
"hash_input_tokens": "b8f44fefcc762655", |
|
"hash_cont_tokens": "fe549766a0ce738a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "3e3d8fb5dcd581d1", |
|
"hash_input_tokens": "1b90412e20fb6a54", |
|
"hash_cont_tokens": "0349d70fe949b783" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "8d202b92b5bbc680", |
|
"hash_input_tokens": "351024b1610dbe3f", |
|
"hash_cont_tokens": "65b9e8001c0d9d5e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "abea4c437d96afea", |
|
"hash_input_tokens": "df82ce8e5f198ab4", |
|
"hash_cont_tokens": "707d382e7333be99" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "a0bcf40e918e6155", |
|
"hash_input_tokens": "1f3d0fb6dd6d6c07", |
|
"hash_cont_tokens": "b2739963cb832e04" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "439c16f6cb92a3c0", |
|
"hash_input_tokens": "23c3f6827c8bb35d", |
|
"hash_cont_tokens": "b5f274f703abc5b7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "1030e11d7cf70b2d", |
|
"hash_input_tokens": "63bef6cbe37cafe3", |
|
"hash_cont_tokens": "87c18aae5cfd812b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17921, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "890f5941deb3c0f8", |
|
"hash_input_tokens": "f6723eaa4b629d8a", |
|
"hash_cont_tokens": "8e03dcc04ffbd0bd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5096, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "55e0657bdc4b57ec", |
|
"hash_input_tokens": "2de50619448b122c", |
|
"hash_cont_tokens": "a4ef65bc7bab8dcf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2096, |
|
"non_padded": 52, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"hashes": { |
|
"hash_examples": "30e09697562ff9e7", |
|
"hash_full_prompts": "4741945dc05c404c", |
|
"hash_input_tokens": "d9d440af4626d814", |
|
"hash_cont_tokens": "587bf4caea1658f4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 74, |
|
"padded": 256, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "bef69fb8b3b75f28", |
|
"hash_full_prompts": "defb906f381cfcd9", |
|
"hash_input_tokens": "84e79f77f9c8b247", |
|
"hash_cont_tokens": "95a234c727b7b43c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2400, |
|
"non_padded": 3, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "bd066a9e6a140a4b", |
|
"hash_full_prompts": "070068c411fb3997", |
|
"hash_input_tokens": "45ae2faa900c93a1", |
|
"hash_cont_tokens": "98f874e7446c544b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1545, |
|
"non_padded": 43, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a9c2cd9a9929292a", |
|
"hash_full_prompts": "b876f89bc9dcafa1", |
|
"hash_input_tokens": "6557f4b55024c336", |
|
"hash_cont_tokens": "3d676d0d2f081e05" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1505, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "2f8a77bbbd0e21ff", |
|
"hash_full_prompts": "e677edecf0bd75d5", |
|
"hash_input_tokens": "2370a7df88f6ac3c", |
|
"hash_cont_tokens": "322ea7667dfc2c2d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 105, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5eed3da47822539b", |
|
"hash_full_prompts": "94d49814387fbb4e", |
|
"hash_input_tokens": "19bc93cb59607162", |
|
"hash_cont_tokens": "f3c78f80ddea1519" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 252, |
|
"padded": 918, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "91ae6d22a0f0213d", |
|
"hash_full_prompts": "16e71c6d0e42d330", |
|
"hash_input_tokens": "a9c9e2f98f772743", |
|
"hash_cont_tokens": "aaa20fdc3c06d2c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1409, |
|
"padded": 4968, |
|
"non_padded": 88, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "f27bf8791bea2bb9", |
|
"hash_full_prompts": "8094808a6f0fa9b9", |
|
"hash_input_tokens": "54340cd4cbcb0ed6", |
|
"hash_cont_tokens": "e02c7ebfec7f8df8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 312, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "74f5bb0098c8916f", |
|
"hash_full_prompts": "47c41a36aa6573c3", |
|
"hash_input_tokens": "65fd2ed38bf47e12", |
|
"hash_cont_tokens": "1ffdd9a463183bfa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 236, |
|
"padded": 940, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a4278d7b525d46fe", |
|
"hash_full_prompts": "4191fc5811855ee4", |
|
"hash_input_tokens": "e3936fe5034fe2b4", |
|
"hash_cont_tokens": "821feca3d9004c98" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 261, |
|
"padded": 994, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "0cb6c07e4b80dfd4", |
|
"hash_full_prompts": "bb0869b87598266f", |
|
"hash_input_tokens": "1175a53ab1bd5d04", |
|
"hash_cont_tokens": "8b4f299b6f012a83" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 100, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "d96fc1bc32473533", |
|
"hash_full_prompts": "7ed3631dd316f004", |
|
"hash_input_tokens": "0aba5ea716735f7a", |
|
"hash_cont_tokens": "1bc67f97b48b9ece" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 190, |
|
"padded": 476, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "8835587e436cbaff", |
|
"hash_full_prompts": "6250444a7b9604b2", |
|
"hash_input_tokens": "997e414c6928c648", |
|
"hash_cont_tokens": "e9d871459bc85f62" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 64, |
|
"padded": 247, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"hashes": { |
|
"hash_examples": "7a4c38a2c451d075", |
|
"hash_full_prompts": "45b7b936d82c1150", |
|
"hash_input_tokens": "2c88f259576e1eb8", |
|
"hash_cont_tokens": "cd411982b0f12d43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1211, |
|
"padded": 3606, |
|
"non_padded": 79, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c04c252836601279", |
|
"hash_full_prompts": "214ea55ed2547a5e", |
|
"hash_input_tokens": "96597af859f46d09", |
|
"hash_cont_tokens": "4bda66df90f2d4d8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 360, |
|
"padded": 1374, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "18fba1579406b3cc", |
|
"hash_full_prompts": "bf2e646324d90429", |
|
"hash_input_tokens": "2a1e021fa1be08ed", |
|
"hash_cont_tokens": "3ea283b0f50a72f5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 344, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"hashes": { |
|
"hash_examples": "7c9e86fba8151562", |
|
"hash_full_prompts": "cb280cc805670549", |
|
"hash_input_tokens": "c3a9aa441a9c0ec7", |
|
"hash_cont_tokens": "91cdb256248a5bdf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 137, |
|
"padded": 532, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "acfbe4e1f0314b85", |
|
"hash_full_prompts": "755f37afe33f7559", |
|
"hash_input_tokens": "9a53b78640fc569a", |
|
"hash_cont_tokens": "76d704fbedbe5ab8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 864, |
|
"padded": 3169, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "03cd0ecf10224316", |
|
"hash_full_prompts": "900b57bd55d23559", |
|
"hash_input_tokens": "e3c925f37edea57c", |
|
"hash_cont_tokens": "aff2aed9268be2e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 172, |
|
"padded": 607, |
|
"non_padded": 21, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "c3ee30196e05e122", |
|
"hash_full_prompts": "a0a424f0a3b0eac3", |
|
"hash_input_tokens": "cefe36a7c4833462", |
|
"hash_cont_tokens": "6c8978669cdc11fb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 162, |
|
"padded": 629, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "e2e329d2bdd9fb7b", |
|
"hash_full_prompts": "5e3f92a804772548", |
|
"hash_input_tokens": "30d57ff320b61c5e", |
|
"hash_cont_tokens": "37e2e9c548d8c904" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1038, |
|
"padded": 4052, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "420b161444291989", |
|
"hash_full_prompts": "7b0ccab5b80729bd", |
|
"hash_input_tokens": "9b2afb2cfa9c884f", |
|
"hash_cont_tokens": "5e24bb4c8be23901" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 966, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5bc5ca48a4210899", |
|
"hash_full_prompts": "492ea8c528d1a4ce", |
|
"hash_input_tokens": "e6a84cc7fbbea387", |
|
"hash_cont_tokens": "b9f45957a97d1ecf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 216, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c7cc37f29311bea1", |
|
"hash_full_prompts": "3b1781c2884844f2", |
|
"hash_input_tokens": "33e94552f837f9fb", |
|
"hash_cont_tokens": "b16e65544485acae" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 760, |
|
"padded": 2886, |
|
"non_padded": 76, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "5b9f1973337153a2", |
|
"hash_full_prompts": "61496c59903567ab", |
|
"hash_input_tokens": "5f16dccac0a842f3", |
|
"hash_cont_tokens": "e3b355a58a286ee4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 734, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "af2469847007c1fe", |
|
"hash_full_prompts": "bef057b12f592695", |
|
"hash_input_tokens": "976354c56818402b", |
|
"hash_cont_tokens": "a52a22630c3cb3f7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"hashes": { |
|
"hash_examples": "c8da9b2f16a5ea0f", |
|
"hash_full_prompts": "0e5f1eb074dc0576", |
|
"hash_input_tokens": "bfc64f6f39074abb", |
|
"hash_cont_tokens": "1866597a67ff4424" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 639, |
|
"padded": 2493, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "efb11bc8ef398117", |
|
"hash_full_prompts": "f744eb11c6b54d16", |
|
"hash_input_tokens": "08de9bc825a5d321", |
|
"hash_cont_tokens": "6b678abb2fd451bd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 334, |
|
"padded": 1281, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "9e33ab030eebdb99", |
|
"hash_full_prompts": "7813be863908efee", |
|
"hash_input_tokens": "463ec973157780e5", |
|
"hash_cont_tokens": "e0c922e595ad51cd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 867, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "4167565d878b20eb", |
|
"hash_full_prompts": "4aca4836da321f21", |
|
"hash_input_tokens": "99734f514241a3c7", |
|
"hash_cont_tokens": "97e2182a63c5686c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 999, |
|
"padded": 2969, |
|
"non_padded": 55, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"hashes": { |
|
"hash_examples": "e77f52c8fe4352b3", |
|
"hash_full_prompts": "a3be12bf791af8de", |
|
"hash_input_tokens": "c5c7dc6826385845", |
|
"hash_cont_tokens": "324cc46c561b417c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 314, |
|
"padded": 1223, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"hashes": { |
|
"hash_examples": "09682649b04b7327", |
|
"hash_full_prompts": "78d4da7370157ec4", |
|
"hash_input_tokens": "54d94ede5a933a47", |
|
"hash_cont_tokens": "1e98e1e2cd19a5e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 200, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "edb027bfae7e76f1", |
|
"hash_full_prompts": "178f2eb46937183b", |
|
"hash_input_tokens": "75bfc213a86175e1", |
|
"hash_cont_tokens": "632401a080490684" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 409, |
|
"padded": 1290, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "96e72c9094c2364c", |
|
"hash_full_prompts": "239009c63aea5254", |
|
"hash_input_tokens": "e7158be6feedd6b0", |
|
"hash_cont_tokens": "17e42af5dbb9eee1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 242, |
|
"padded": 924, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "69e35bad3dec5a4d", |
|
"hash_full_prompts": "4944591b3477da56", |
|
"hash_input_tokens": "450d0c7980792e34", |
|
"hash_cont_tokens": "a7423721c9837336" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 336, |
|
"padded": 1206, |
|
"non_padded": 22, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "dc6ebd484a02fca5", |
|
"hash_full_prompts": "2e02d39134ca27b9", |
|
"hash_input_tokens": "f701902515c930b6", |
|
"hash_cont_tokens": "69b31fc6977897bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 39, |
|
"padded": 156, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "58a1722472c9e644", |
|
"hash_full_prompts": "8775cd855e0037a0", |
|
"hash_input_tokens": "cd4e875d7682e885", |
|
"hash_cont_tokens": "f9506aa86f66954d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 996, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "07a4ed6aabbdfd1e", |
|
"hash_full_prompts": "37e2532e45ac8d56", |
|
"hash_input_tokens": "58d50b4224872505", |
|
"hash_cont_tokens": "4799b66f49438465" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 210, |
|
"padded": 688, |
|
"non_padded": 22, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "8ca955902f304664", |
|
"hash_full_prompts": "d16202ba6df674e8", |
|
"hash_input_tokens": "986dc1d7d3e7e2e4", |
|
"hash_cont_tokens": "4602cb88db99312d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 241, |
|
"padded": 919, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "934025ab3738123c", |
|
"hash_full_prompts": "978eed85ae542f82", |
|
"hash_input_tokens": "6007527150145bcd", |
|
"hash_cont_tokens": "19e973e9f05c9c82" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 705, |
|
"padded": 2004, |
|
"non_padded": 39, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "0b557911f2f6d919", |
|
"hash_full_prompts": "b9b74df23feeb3d4", |
|
"hash_input_tokens": "6e05db140b631116", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "a552d8a0ef294061", |
|
"hash_full_prompts": "b06f5aaf0f85df9f", |
|
"hash_input_tokens": "67bf2803fb118238", |
|
"hash_cont_tokens": "96c000fa61c3bd55" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 532, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "c4a372d0af7da098", |
|
"hash_full_prompts": "3a822121db18d697", |
|
"hash_input_tokens": "85ddaf31cb466ab7", |
|
"hash_cont_tokens": "b13cc32205751d90" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 604, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "9f71d816abf8af7a", |
|
"hash_full_prompts": "9c3db18f134ba3a8", |
|
"hash_input_tokens": "2c9776a82304f1a9", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "38303cd765589ef3", |
|
"hash_full_prompts": "b5da720810123a41", |
|
"hash_input_tokens": "478e660a7611b19e", |
|
"hash_cont_tokens": "c771582839d4f30c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1028, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "dbd9b5d318e60b04", |
|
"hash_full_prompts": "35dc7c41ef73879a", |
|
"hash_input_tokens": "bb74627898cf6e30", |
|
"hash_cont_tokens": "ec774ac0d0ad658b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 572, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "6f88491d03db8a4c", |
|
"hash_full_prompts": "60538c856b87e6f8", |
|
"hash_input_tokens": "3f341647d141f2e3", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "ebfdee5ef2ed5e17", |
|
"hash_full_prompts": "f5c6bef4fe087efc", |
|
"hash_input_tokens": "ae197f9f0b74255a", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "e3f22cd7712aae2f", |
|
"hash_full_prompts": "23654f332df33413", |
|
"hash_input_tokens": "e9b2485f3b7317b1", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "51a5501373afb5a7", |
|
"hash_full_prompts": "cd2cef8530d399a0", |
|
"hash_input_tokens": "98980a58c0e39af0", |
|
"hash_cont_tokens": "1823a754e6394181" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 680, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "2d3e015989b108db", |
|
"hash_full_prompts": "e410e1b05e941a97", |
|
"hash_input_tokens": "917ff93aa3f4402f", |
|
"hash_cont_tokens": "ee5dc873d27b9e10" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 404, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "f8810eddc38dfee4", |
|
"hash_full_prompts": "42cc7d49956effc3", |
|
"hash_input_tokens": "ff792b61c84db93f", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "211e32cc43c6b1dc", |
|
"hash_full_prompts": "ff91fa0a4e67f9a8", |
|
"hash_input_tokens": "88eb2efb515a65bb", |
|
"hash_cont_tokens": "b7b580bbcf7e0afa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 896, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "810023786b2484d2", |
|
"hash_full_prompts": "eb28f6258f649706", |
|
"hash_input_tokens": "c96c1f4df4664579", |
|
"hash_cont_tokens": "d44932b2a931e093" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 452, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "a222760c93eaa1ee", |
|
"hash_full_prompts": "68e10a631a57f618", |
|
"hash_input_tokens": "60edabc0f14459e5", |
|
"hash_cont_tokens": "159f4cb1232d2a3c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 544, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "4c069aeee64dc227", |
|
"hash_full_prompts": "012257987ce861de", |
|
"hash_input_tokens": "f1145c2776587119", |
|
"hash_cont_tokens": "2bf44b70baf49dfa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1500, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "3cb0ccbf8e8a77ae", |
|
"hash_full_prompts": "1092202f23e61e35", |
|
"hash_input_tokens": "1efb064ea71f3f2c", |
|
"hash_cont_tokens": "8765c45f4711ebb8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 504, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "c1d039e64ea321b9", |
|
"hash_full_prompts": "7bfe9d88e40df3ee", |
|
"hash_input_tokens": "120c84337f637e6b", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 388, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "ddcb8237bb4ba08a", |
|
"hash_full_prompts": "db5ce7a5a6f1dbc3", |
|
"hash_input_tokens": "faae5858a4dec0fe", |
|
"hash_cont_tokens": "49908817551a4513" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3813, |
|
"padded": 15100, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "07061b55c5c436d9", |
|
"hash_full_prompts": "742ff8decec1118f", |
|
"hash_input_tokens": "f659ee3dbb9b8d63", |
|
"hash_cont_tokens": "a7f16a586e1cfe0f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4016, |
|
"padded": 15912, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "8d3405483d5fdcff", |
|
"hash_full_prompts": "544efc214e04a185", |
|
"hash_input_tokens": "5c72baed65799547", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "031c49a430356414", |
|
"hash_full_prompts": "36062098867d2fdf", |
|
"hash_input_tokens": "dbdc436363ab7ef7", |
|
"hash_cont_tokens": "5420388845898571" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 8152, |
|
"padded": 32448, |
|
"non_padded": 160, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "d0ce2b019a66c1de", |
|
"hash_full_prompts": "fc69d2030755028c", |
|
"hash_input_tokens": "c22e2cc72ad66a98", |
|
"hash_cont_tokens": "fa4a2c8384dfaaa5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 768, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "7d7c6d476d0576b1", |
|
"hash_full_prompts": "ea01912a5b3de0d4", |
|
"hash_input_tokens": "11c8a4bab7441292", |
|
"hash_cont_tokens": "682709d2fa91c75e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 768, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "694d3a01c6144ddb", |
|
"hash_full_prompts": "067fc8d5b4c2f558", |
|
"hash_input_tokens": "19e2a03c137c8970", |
|
"hash_cont_tokens": "4f2f97c723cb220f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2891, |
|
"padded": 11440, |
|
"non_padded": 124, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "004f9c0a40b5ec10", |
|
"hash_full_prompts": "1baa89b754304381", |
|
"hash_input_tokens": "e64a27ba52d9145a", |
|
"hash_cont_tokens": "8130a825e5a2ee3d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1072, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "80cf03d462e6ccbc", |
|
"hash_full_prompts": "45f2c96cec5cd28c", |
|
"hash_input_tokens": "d1383af8f0595dac", |
|
"hash_cont_tokens": "4f6974070ef28d29" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 948, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "92218def5b383845", |
|
"hash_full_prompts": "9d8ce8051351cf76", |
|
"hash_input_tokens": "ce71fd09e5fefc38", |
|
"hash_cont_tokens": "5d32bcd7ba8252ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 604, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "323f7848fee32e58", |
|
"hash_full_prompts": "63e76443cb7e9247", |
|
"hash_input_tokens": "c415415d338e3fdb", |
|
"hash_cont_tokens": "1512a6938229952b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2156, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "d7bbe0d037cf31ec", |
|
"hash_full_prompts": "31e18f289d3ff6bd", |
|
"hash_input_tokens": "0d165533ee3defd6", |
|
"hash_cont_tokens": "95cb29e5c31221c8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4232, |
|
"padded": 16776, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "722ec9207e3b0e04", |
|
"hash_full_prompts": "a8049bd5bf8ffccf", |
|
"hash_input_tokens": "e503ed4f5fc40f1b", |
|
"hash_cont_tokens": "0c31c2de1e3429bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "b5eb675d3b578584", |
|
"hash_full_prompts": "51d86b4490ac7e71", |
|
"hash_input_tokens": "61ee1f212a3fa5f6", |
|
"hash_cont_tokens": "5e704d9d54138833" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "713ac79cd2dd2d7b", |
|
"hash_full_prompts": "f863ba5d24da8adc", |
|
"hash_input_tokens": "316a13071b03a265", |
|
"hash_cont_tokens": "e5a3e63957647f04" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 868, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "47551ab4e5dcf6c5", |
|
"hash_full_prompts": "eb92874ade3ce5f1", |
|
"hash_input_tokens": "e5911f9ce5e0fd83", |
|
"hash_cont_tokens": "90a9b6d1231332f4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 512, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "da360336943398d5", |
|
"hash_full_prompts": "03f5f8d05ff8b8d3", |
|
"hash_input_tokens": "74e78058c57966e5", |
|
"hash_cont_tokens": "9ab33ab519d55748" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 484, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "661d161a486fb035", |
|
"hash_full_prompts": "a6713089705fd8ee", |
|
"hash_input_tokens": "ea98394e44abbc36", |
|
"hash_cont_tokens": "2cecb6db3790a23b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 432, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "5c3926384758bda7", |
|
"hash_full_prompts": "12f9bceb00903955", |
|
"hash_input_tokens": "a56af9a9e4b03e05", |
|
"hash_cont_tokens": "f5c60e363dd9fc3d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 640, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "3ce756e6a22ffc48", |
|
"hash_full_prompts": "11f5a6e1e0c98e08", |
|
"hash_input_tokens": "7d0933c464b4381e", |
|
"hash_cont_tokens": "d41e7e44237c0a16" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 444, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"hashes": { |
|
"hash_examples": "20fe769bb3276832", |
|
"hash_full_prompts": "7062687337a3a595", |
|
"hash_input_tokens": "a14e25be589a8970", |
|
"hash_cont_tokens": "372864196dbb4cad" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 396, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "6b19449559d987ce", |
|
"hash_full_prompts": "9d23a14914d796f3", |
|
"hash_input_tokens": "a4f3cc7508d4e93b", |
|
"hash_cont_tokens": "ad74b6b4e88f6100" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 932, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "cbb0fa9df0f5435a", |
|
"hash_full_prompts": "9891ab73be820b06", |
|
"hash_input_tokens": "70af4afdc52c7941", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 384, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "0a4134046c23cff9", |
|
"hash_full_prompts": "cb347937c22a0fcb", |
|
"hash_input_tokens": "a5984d60b5ab4a6c", |
|
"hash_cont_tokens": "f2831dc319b7001c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2420, |
|
"padded": 9580, |
|
"non_padded": 100, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "1ac8a0967c82caa0", |
|
"hash_full_prompts": "49fadb27be0ce7aa", |
|
"hash_input_tokens": "df785a14b160f6d2", |
|
"hash_cont_tokens": "d6a32c4f89ec0e43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1368, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "2c0670188bc5a789", |
|
"hash_full_prompts": "c4129c0b4487975b", |
|
"hash_input_tokens": "252322ad84c494d3", |
|
"hash_cont_tokens": "0abad6841e9b5dc1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3504, |
|
"non_padded": 76, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "658628c0dcdfe201", |
|
"hash_full_prompts": "dc303807e16966cd", |
|
"hash_input_tokens": "cc46276a307de526", |
|
"hash_cont_tokens": "1947ff415070dfa5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1208, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "8b6707b322affafd", |
|
"hash_full_prompts": "7a7cfceedd3d2ea9", |
|
"hash_input_tokens": "d1ec6f38f5a0a376", |
|
"hash_cont_tokens": "566ed263a8423f58" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1224, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "0c85ffcdc9a7b367", |
|
"hash_full_prompts": "438b0c8c256bd402", |
|
"hash_input_tokens": "85e4ccbc62fc9f5e", |
|
"hash_cont_tokens": "69725bb3099f23d0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1272, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "cce1ea2d5f544b2f", |
|
"hash_full_prompts": "0b347aee49687cec", |
|
"hash_input_tokens": "b4299b14a3ee4aa5", |
|
"hash_cont_tokens": "25802ac32c51a7f7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4514, |
|
"padded": 17819, |
|
"non_padded": 237, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "1c654b024b54eb4b", |
|
"hash_full_prompts": "5eaef9276cbb03a1", |
|
"hash_input_tokens": "4fb1579a4e12d007", |
|
"hash_cont_tokens": "7f2b1b7218a1ef40" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7987, |
|
"padded": 31596, |
|
"non_padded": 352, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "c621eaacfa662ebc", |
|
"hash_full_prompts": "827b2000a1b95969", |
|
"hash_input_tokens": "5bf5aaee785b701a", |
|
"hash_cont_tokens": "e1afe1503a5d02c5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1637, |
|
"padded": 6476, |
|
"non_padded": 72, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "bc14a28eaec87dc4", |
|
"hash_full_prompts": "ac74eed216ed2aa4", |
|
"hash_input_tokens": "479a3b289ebcb83c", |
|
"hash_cont_tokens": "1c41f3eeadeec685" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3503, |
|
"padded": 13760, |
|
"non_padded": 252, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "de4989d9375885c4", |
|
"hash_full_prompts": "b2a1a4d76e245a88", |
|
"hash_input_tokens": "5452755b3b65e771", |
|
"hash_cont_tokens": "3914ab4a5d5b69e8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 432, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "3f84bfeec717c6de", |
|
"hash_full_prompts": "2bdaa60ae6cdb812", |
|
"hash_input_tokens": "ef897bb433a39d1b", |
|
"hash_cont_tokens": "1d2b199df736dea4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 972, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "10d7c2fae10bfcbc", |
|
"hash_full_prompts": "6dc64bcb9a8e74b3", |
|
"hash_input_tokens": "f1ff20fc6f81e8ca", |
|
"hash_cont_tokens": "3556cb090eda6dec" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 788, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "bb05f02c38ddaf1a", |
|
"hash_full_prompts": "2c78f15085fe5d18", |
|
"hash_input_tokens": "cf3e4b9434a54962", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 376, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"hashes": { |
|
"hash_examples": "290915a48884ede2", |
|
"hash_full_prompts": "6ac0831aaf7baaa6", |
|
"hash_input_tokens": "09df497a0f9981ea", |
|
"hash_cont_tokens": "cbf93f8f3bd5c82c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 636, |
|
"non_padded": 28, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "91cc5451c7284f75", |
|
"hash_full_prompts": "bc7e3635516606d1", |
|
"hash_input_tokens": "8ce068df9a307425", |
|
"hash_cont_tokens": "b5fbc024ac54a858" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 672, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "bef69fb8b3b75f28", |
|
"hash_full_prompts": "defb906f381cfcd9", |
|
"hash_input_tokens": "cfd6f557e92b800e", |
|
"hash_cont_tokens": "05d3f2bc980e6cbb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2346, |
|
"non_padded": 57, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "bd066a9e6a140a4b", |
|
"hash_full_prompts": "070068c411fb3997", |
|
"hash_input_tokens": "f92e93204cd1dcfd", |
|
"hash_cont_tokens": "ac1327c8a93a78f2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1521, |
|
"non_padded": 67, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"hashes": { |
|
"hash_examples": "b77354655caca219", |
|
"hash_full_prompts": "3b6c35af9dd55ba5", |
|
"hash_input_tokens": "3f497e926dd77d76", |
|
"hash_cont_tokens": "ef0aae761dbc9563" |
|
}, |
|
"truncated": 36, |
|
"non_truncated": 24, |
|
"padded": 60, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"hashes": { |
|
"hash_examples": "daa90cfb03dd9ed8", |
|
"hash_full_prompts": "a1a797d500db526f", |
|
"hash_input_tokens": "28c17df2a02b1a45", |
|
"hash_cont_tokens": "9e4af615e4d8373c" |
|
}, |
|
"truncated": 37, |
|
"non_truncated": 16, |
|
"padded": 53, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "ca046355c96d95d9", |
|
"hash_full_prompts": "f0b0bf58012fc511", |
|
"hash_input_tokens": "c98394a996e20c73", |
|
"hash_cont_tokens": "55753deb743d2c3e" |
|
}, |
|
"truncated": 59, |
|
"non_truncated": 17, |
|
"padded": 76, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"hashes": { |
|
"hash_examples": "6ff77d23c0f3113d", |
|
"hash_full_prompts": "e44f96f8e6f119af", |
|
"hash_input_tokens": "6764872aaa2b5eec", |
|
"hash_cont_tokens": "0f15fbe9ae8d2ad9" |
|
}, |
|
"truncated": 40, |
|
"non_truncated": 29, |
|
"padded": 69, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "085db2421f8abf29", |
|
"hash_full_prompts": "76b6bac76dbaf06c", |
|
"hash_input_tokens": "4bb4cbf155e95693", |
|
"hash_cont_tokens": "86a3631c2986ee08" |
|
}, |
|
"truncated": 64, |
|
"non_truncated": 9, |
|
"padded": 73, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"hashes": { |
|
"hash_examples": "78f4d16753b18c49", |
|
"hash_full_prompts": "63c7263b4ad8a155", |
|
"hash_input_tokens": "97fef45f53c0229d", |
|
"hash_cont_tokens": "43ec4780893c5173" |
|
}, |
|
"truncated": 45, |
|
"non_truncated": 12, |
|
"padded": 56, |
|
"non_padded": 1, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"hashes": { |
|
"hash_examples": "373f72b4e30243c4", |
|
"hash_full_prompts": "9b519b28f08eebb6", |
|
"hash_input_tokens": "3dba8d8a5c44be92", |
|
"hash_cont_tokens": "c74eba74aea9d584" |
|
}, |
|
"truncated": 2, |
|
"non_truncated": 76, |
|
"padded": 78, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"hashes": { |
|
"hash_examples": "51fa7940e42ffcc6", |
|
"hash_full_prompts": "d5ca4d44e29290d9", |
|
"hash_input_tokens": "42cf3a01f02df209", |
|
"hash_cont_tokens": "afde88bab374803e" |
|
}, |
|
"truncated": 41, |
|
"non_truncated": 14, |
|
"padded": 55, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alrage_qa|0": { |
|
"hashes": { |
|
"hash_examples": "3edbbe22cabd4160", |
|
"hash_full_prompts": "bbeddba8b85e29c5", |
|
"hash_input_tokens": "dbccd73d9313a2b1", |
|
"hash_cont_tokens": "e6238b5a26703381" |
|
}, |
|
"truncated": 2106, |
|
"non_truncated": 0, |
|
"padded": 2106, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "b8b3b49631adcc40", |
|
"hash_full_prompts": "e48a7dc3cb64f4f5", |
|
"hash_input_tokens": "98ceab2143f4e323", |
|
"hash_cont_tokens": "786dfe8a7b41c516" |
|
}, |
|
"truncated": 324, |
|
"non_truncated": 91538, |
|
"padded": 333440, |
|
"non_padded": 3444, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |