v2_results/Isaak-Carter/Josiefied-Qwen2.5-7B-Instruct-abliterated-v2/results_2025-01-18T00-19-09.123773.json
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": 0,
"start_time": 718.952305897,
"end_time": 6056.479640683,
"total_evaluation_time_secondes": "5337.527334786",
"model_name": "Isaak-Carter/Josiefied-Qwen2.5-7B-Instruct-abliterated-v2",
"model_sha": "201c11e7416275d214e98f3726ed8abba88af33b",
"model_dtype": "torch.bfloat16",
"model_size": "14.19 GB"
},
"results": { | |
"community|alghafa:meta_ar_dialects|0": { | |
"acc_norm": 0.5712696941612604, | |
"acc_norm_stderr": 0.006738406606848431 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"acc_norm": 0.7284916201117319, | |
"acc_norm_stderr": 0.01487425216809527 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"acc_norm": 0.9066666666666666, | |
"acc_norm_stderr": 0.03381632066833329 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"acc_norm": 0.88, | |
"acc_norm_stderr": 0.026621886338401454 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"acc_norm": 0.8133333333333334, | |
"acc_norm_stderr": 0.03192085664169659 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"acc_norm": 0.7857410881801126, | |
"acc_norm_stderr": 0.0045890956260146165 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"acc_norm": 0.5414512093411176, | |
"acc_norm_stderr": 0.006435970887282992 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"acc_norm": 0.38546511627906976, | |
"acc_norm_stderr": 0.01173891269286054 | |
}, | |
"community|arabic_exams|0": { | |
"acc_norm": 0.44878957169459965, | |
"acc_norm_stderr": 0.02148313691486752 | |
}, | |
"community|arabic_mmlu:Accounting (University)|0": { | |
"acc_norm": 0.47297297297297297, | |
"acc_norm_stderr": 0.05843501715816532 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)|0": { | |
"acc_norm": 0.5261437908496732, | |
"acc_norm_stderr": 0.020200164564804588 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": { | |
"acc_norm": 0.4383561643835616, | |
"acc_norm_stderr": 0.026007186112291702 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)|0": { | |
"acc_norm": 0.33589743589743587, | |
"acc_norm_stderr": 0.023946724741563976 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": { | |
"acc_norm": 0.5925925925925926, | |
"acc_norm_stderr": 0.09636202008710973 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": { | |
"acc_norm": 0.5357142857142857, | |
"acc_norm_stderr": 0.03147910771121848 | |
}, | |
"community|arabic_mmlu:Biology (High School)|0": { | |
"acc_norm": 0.42725337118523776, | |
"acc_norm_stderr": 0.013183256210641037 | |
}, | |
"community|arabic_mmlu:Civics (High School)|0": { | |
"acc_norm": 0.42528735632183906, | |
"acc_norm_stderr": 0.05331106836455265 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)|0": { | |
"acc_norm": 0.4745762711864407, | |
"acc_norm_stderr": 0.03257421209597024 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)|0": { | |
"acc_norm": 0.6015325670498084, | |
"acc_norm_stderr": 0.030362624913653626 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": { | |
"acc_norm": 0.5925925925925926, | |
"acc_norm_stderr": 0.09636202008710973 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": { | |
"acc_norm": 0.7473684210526316, | |
"acc_norm_stderr": 0.031606782497111664 | |
}, | |
"community|arabic_mmlu:Computer Science (University)|0": { | |
"acc_norm": 0.609375, | |
"acc_norm_stderr": 0.06146842128667525 | |
}, | |
"community|arabic_mmlu:Driving Test|0": { | |
"acc_norm": 0.6061106523534269, | |
"acc_norm_stderr": 0.014046573343411686 | |
}, | |
"community|arabic_mmlu:Economics (High School)|0": { | |
"acc_norm": 0.5583333333333333, | |
"acc_norm_stderr": 0.02620878365075097 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)|0": { | |
"acc_norm": 0.7471264367816092, | |
"acc_norm_stderr": 0.04687049503854671 | |
}, | |
"community|arabic_mmlu:Economics (University)|0": { | |
"acc_norm": 0.5036496350364964, | |
"acc_norm_stderr": 0.04287350410390777 | |
}, | |
"community|arabic_mmlu:General Knowledge|0": { | |
"acc_norm": 0.5347222222222222, | |
"acc_norm_stderr": 0.016979108125211872 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": { | |
"acc_norm": 0.6337209302325582, | |
"acc_norm_stderr": 0.036843172681015855 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": { | |
"acc_norm": 0.5802469135802469, | |
"acc_norm_stderr": 0.03889470040548675 | |
}, | |
"community|arabic_mmlu:Geography (High School)|0": { | |
"acc_norm": 0.45375722543352603, | |
"acc_norm_stderr": 0.015460205106928773 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)|0": { | |
"acc_norm": 0.6433823529411765, | |
"acc_norm_stderr": 0.029097209568411976 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)|0": { | |
"acc_norm": 0.5614035087719298, | |
"acc_norm_stderr": 0.0663095566682855 | |
}, | |
"community|arabic_mmlu:History (High School)|0": { | |
"acc_norm": 0.4144736842105263, | |
"acc_norm_stderr": 0.017881370514373447 | |
}, | |
"community|arabic_mmlu:History (Middle School)|0": { | |
"acc_norm": 0.541871921182266, | |
"acc_norm_stderr": 0.03505630140785741 | |
}, | |
"community|arabic_mmlu:History (Primary School)|0": { | |
"acc_norm": 0.5, | |
"acc_norm_stderr": 0.04975185951049946 | |
}, | |
"community|arabic_mmlu:Islamic Studies|0": { | |
"acc_norm": 0.3677621283255086, | |
"acc_norm_stderr": 0.019090329138246567 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": { | |
"acc_norm": 0.562874251497006, | |
"acc_norm_stderr": 0.027182335529915995 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { | |
"acc_norm": 0.5756302521008403, | |
"acc_norm_stderr": 0.03210479051015776 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { | |
"acc_norm": 0.6526526526526526, | |
"acc_norm_stderr": 0.015071545062775301 | |
}, | |
"community|arabic_mmlu:Law (Professional)|0": { | |
"acc_norm": 0.6910828025477707, | |
"acc_norm_stderr": 0.026116436415099382 | |
}, | |
"community|arabic_mmlu:Management (University)|0": { | |
"acc_norm": 0.64, | |
"acc_norm_stderr": 0.05579886659703324 | |
}, | |
"community|arabic_mmlu:Math (Primary School)|0": { | |
"acc_norm": 0.6454767726161369, | |
"acc_norm_stderr": 0.023682774239091705 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": { | |
"acc_norm": 0.6239669421487604, | |
"acc_norm_stderr": 0.03120220476133042 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": { | |
"acc_norm": 0.7261904761904762, | |
"acc_norm_stderr": 0.02436279696713548 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)|0": { | |
"acc_norm": 0.5641025641025641, | |
"acc_norm_stderr": 0.08044135838502685 | |
}, | |
"community|arabic_mmlu:Physics (High School)|0": { | |
"acc_norm": 0.38823529411764707, | |
"acc_norm_stderr": 0.03057897034303607 | |
}, | |
"community|arabic_mmlu:Political Science (University)|0": { | |
"acc_norm": 0.5238095238095238, | |
"acc_norm_stderr": 0.034546488100476766 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)|0": { | |
"acc_norm": 0.44813278008298757, | |
"acc_norm_stderr": 0.03210073931508956 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)|0": { | |
"acc_norm": 0.6411347517730497, | |
"acc_norm_stderr": 0.018078151909972997 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra|0": { | |
"acc_norm": 0.19, | |
"acc_norm_stderr": 0.03942772444036623 | |
}, | |
"community|arabic_mmlu_ht:anatomy|0": { | |
"acc_norm": 0.34074074074074073, | |
"acc_norm_stderr": 0.04094376269996796 | |
}, | |
"community|arabic_mmlu_ht:astronomy|0": { | |
"acc_norm": 0.5263157894736842, | |
"acc_norm_stderr": 0.04063302731486671 | |
}, | |
"community|arabic_mmlu_ht:business_ethics|0": { | |
"acc_norm": 0.41, | |
"acc_norm_stderr": 0.04943110704237102 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": { | |
"acc_norm": 0.5094339622641509, | |
"acc_norm_stderr": 0.0307673947078081 | |
}, | |
"community|arabic_mmlu_ht:college_biology|0": { | |
"acc_norm": 0.3541666666666667, | |
"acc_norm_stderr": 0.039994111357535424 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry|0": { | |
"acc_norm": 0.4, | |
"acc_norm_stderr": 0.04923659639173309 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science|0": { | |
"acc_norm": 0.44, | |
"acc_norm_stderr": 0.04988876515698589 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics|0": { | |
"acc_norm": 0.32, | |
"acc_norm_stderr": 0.046882617226215034 | |
}, | |
"community|arabic_mmlu_ht:college_medicine|0": { | |
"acc_norm": 0.44508670520231214, | |
"acc_norm_stderr": 0.03789401760283647 | |
}, | |
"community|arabic_mmlu_ht:college_physics|0": { | |
"acc_norm": 0.38235294117647056, | |
"acc_norm_stderr": 0.04835503696107224 | |
}, | |
"community|arabic_mmlu_ht:computer_security|0": { | |
"acc_norm": 0.42, | |
"acc_norm_stderr": 0.049604496374885836 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics|0": { | |
"acc_norm": 0.4425531914893617, | |
"acc_norm_stderr": 0.032469569197899575 | |
}, | |
"community|arabic_mmlu_ht:econometrics|0": { | |
"acc_norm": 0.2719298245614035, | |
"acc_norm_stderr": 0.041857744240220554 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering|0": { | |
"acc_norm": 0.41379310344827586, | |
"acc_norm_stderr": 0.04104269211806231 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": { | |
"acc_norm": 0.41798941798941797, | |
"acc_norm_stderr": 0.02540255550326091 | |
}, | |
"community|arabic_mmlu_ht:formal_logic|0": { | |
"acc_norm": 0.3968253968253968, | |
"acc_norm_stderr": 0.0437588849272706 | |
}, | |
"community|arabic_mmlu_ht:global_facts|0": { | |
"acc_norm": 0.25, | |
"acc_norm_stderr": 0.04351941398892446 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology|0": { | |
"acc_norm": 0.4615788093364805, | |
"acc_norm_stderr": 0.008074349747679375 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": { | |
"acc_norm": 0.45617529880478086, | |
"acc_norm_stderr": 0.007860543451550068 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": { | |
"acc_norm": 0.41, | |
"acc_norm_stderr": 0.04943110704237102 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history|0": { | |
"acc_norm": 0.3892296368989205, | |
"acc_norm_stderr": 0.005400531227231759 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography|0": { | |
"acc_norm": 0.5808080808080808, | |
"acc_norm_stderr": 0.035155207286704175 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { | |
"acc_norm": 0.5492227979274611, | |
"acc_norm_stderr": 0.035909109522355244 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { | |
"acc_norm": 0.48875821515046697, | |
"acc_norm_stderr": 0.009298465518259174 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": { | |
"acc_norm": 0.31851851851851853, | |
"acc_norm_stderr": 0.028406533090608463 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": { | |
"acc_norm": 0.5210084033613446, | |
"acc_norm_stderr": 0.03244980849990029 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics|0": { | |
"acc_norm": 0.36423841059602646, | |
"acc_norm_stderr": 0.03929111781242742 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology|0": { | |
"acc_norm": 0.5486238532110091, | |
"acc_norm_stderr": 0.02133571471126877 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics|0": { | |
"acc_norm": 0.457703213610586, | |
"acc_norm_stderr": 0.007659298294044689 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history|0": { | |
"acc_norm": 0.35784313725490197, | |
"acc_norm_stderr": 0.03364487286088298 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history|0": { | |
"acc_norm": 0.38396624472573837, | |
"acc_norm_stderr": 0.031658678064106674 | |
}, | |
"community|arabic_mmlu_ht:human_aging|0": { | |
"acc_norm": 0.40358744394618834, | |
"acc_norm_stderr": 0.03292802819330314 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality|0": { | |
"acc_norm": 0.48854961832061067, | |
"acc_norm_stderr": 0.043841400240780176 | |
}, | |
"community|arabic_mmlu_ht:international_law|0": { | |
"acc_norm": 0.4380165289256198, | |
"acc_norm_stderr": 0.045291468044357915 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence|0": { | |
"acc_norm": 0.4166666666666667, | |
"acc_norm_stderr": 0.04766075165356462 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies|0": { | |
"acc_norm": 0.3312883435582822, | |
"acc_norm_stderr": 0.03697983910025588 | |
}, | |
"community|arabic_mmlu_ht:machine_learning|0": { | |
"acc_norm": 0.3125, | |
"acc_norm_stderr": 0.043994650575715215 | |
}, | |
"community|arabic_mmlu_ht:management|0": { | |
"acc_norm": 0.5048543689320388, | |
"acc_norm_stderr": 0.049505043821289195 | |
}, | |
"community|arabic_mmlu_ht:marketing|0": { | |
"acc_norm": 0.5641025641025641, | |
"acc_norm_stderr": 0.03248577511578401 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics|0": { | |
"acc_norm": 0.41, | |
"acc_norm_stderr": 0.049431107042371025 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous|0": { | |
"acc_norm": 0.493801652892562, | |
"acc_norm_stderr": 0.01016526481306677 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes|0": { | |
"acc_norm": 0.38439306358381503, | |
"acc_norm_stderr": 0.026189666966272035 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios|0": { | |
"acc_norm": 0.2770949720670391, | |
"acc_norm_stderr": 0.014968772435812147 | |
}, | |
"community|arabic_mmlu_ht:nutrition|0": { | |
"acc_norm": 0.4934640522875817, | |
"acc_norm_stderr": 0.028627470550556047 | |
}, | |
"community|arabic_mmlu_ht:philosophy|0": { | |
"acc_norm": 0.3633440514469453, | |
"acc_norm_stderr": 0.027316847674192707 | |
}, | |
"community|arabic_mmlu_ht:prehistory|0": { | |
"acc_norm": 0.36419753086419754, | |
"acc_norm_stderr": 0.026774929899722317 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting|0": { | |
"acc_norm": 0.447718210013292, | |
"acc_norm_stderr": 0.007402016937207756 | |
}, | |
"community|arabic_mmlu_ht:professional_law|0": { | |
"acc_norm": 0.38975835733066233, | |
"acc_norm_stderr": 0.005457377366575089 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine|0": { | |
"acc_norm": 0.4868662186927306, | |
"acc_norm_stderr": 0.012357438980600878 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology|0": { | |
"acc_norm": 0.4598915215529546, | |
"acc_norm_stderr": 0.008421900924313276 | |
}, | |
"community|arabic_mmlu_ht:public_relations|0": { | |
"acc_norm": 0.4090909090909091, | |
"acc_norm_stderr": 0.047093069786618966 | |
}, | |
"community|arabic_mmlu_ht:security_studies|0": { | |
"acc_norm": 0.5142857142857142, | |
"acc_norm_stderr": 0.03199615232806287 | |
}, | |
"community|arabic_mmlu_ht:sociology|0": { | |
"acc_norm": 0.5373134328358209, | |
"acc_norm_stderr": 0.03525675167467974 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": { | |
"acc_norm": 0.6, | |
"acc_norm_stderr": 0.049236596391733084 | |
}, | |
"community|arabic_mmlu_ht:virology|0": { | |
"acc_norm": 0.3253012048192771, | |
"acc_norm_stderr": 0.03647168523683228 | |
}, | |
"community|arabic_mmlu_ht:world_religions|0": { | |
"acc_norm": 0.45614035087719296, | |
"acc_norm_stderr": 0.03820042586602966 | |
}, | |
"community|madinah_qa:Arabic Language (General)|0": { | |
"acc_norm": 0.6862745098039216, | |
"acc_norm_stderr": 0.01877168389352818 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)|0": { | |
"acc_norm": 0.4602739726027397, | |
"acc_norm_stderr": 0.02612427204660299 | |
}, | |
"community|aratrust:Ethics|0": { | |
"acc_norm": 0.7, | |
"acc_norm_stderr": 0.05966005392134928 | |
}, | |
"community|aratrust:Illegal|0": { | |
"acc_norm": 0.5471698113207547, | |
"acc_norm_stderr": 0.06902828418342014 | |
}, | |
"community|aratrust:MentalHealth|0": { | |
"acc_norm": 0.8157894736842105, | |
"acc_norm_stderr": 0.044762634463765154 | |
}, | |
"community|aratrust:Offensive|0": { | |
"acc_norm": 0.7246376811594203, | |
"acc_norm_stderr": 0.0541699276519132 | |
}, | |
"community|aratrust:PhysicalHealth|0": { | |
"acc_norm": 0.821917808219178, | |
"acc_norm_stderr": 0.045087711548245445 | |
}, | |
"community|aratrust:Privacy|0": { | |
"acc_norm": 0.7719298245614035, | |
"acc_norm_stderr": 0.05606981784761177 | |
}, | |
"community|aratrust:Trustfulness|0": { | |
"acc_norm": 0.7435897435897436, | |
"acc_norm_stderr": 0.0497609919747403 | |
}, | |
"community|aratrust:Unfairness|0": { | |
"acc_norm": 0.6545454545454545, | |
"acc_norm_stderr": 0.06470956516382614 | |
}, | |
"community|alghafa:_average|0": { | |
"acc_norm": 0.7015523410091615, | |
"acc_norm_stderr": 0.01709196270369165 | |
}, | |
"community|arabic_mmlu:_average|0": { | |
"acc_norm": 0.5527378207460829, | |
"acc_norm_stderr": 0.0365482308307486 | |
}, | |
"community|arabic_mmlu_ht:_average|0": { | |
"acc_norm": 0.4226506866164011, | |
"acc_norm_stderr": 0.03306560150879595 | |
}, | |
"community|madinah_qa:_average|0": { | |
"acc_norm": 0.5732742412033307, | |
"acc_norm_stderr": 0.022447977970065584 | |
}, | |
"community|aratrust:_average|0": { | |
"acc_norm": 0.7224474746350206, | |
"acc_norm_stderr": 0.05540612334435893 | |
}, | |
"all": { | |
"acc_norm": 0.5102408495364906, | |
"acc_norm_stderr": 0.03442269224543721, | |
"llm_as_judge": 0.7575023741690349, | |
"llm_as_judge_stderr": 0.00010440563108429894 | |
}, | |
"community|alrage_qa|0": { | |
"llm_as_judge": 0.7575023741690349, | |
"llm_as_judge_stderr": 0.00010440563108429894 | |
} | |
}, | |
"versions": { | |
"community|alghafa:meta_ar_dialects|0": 0, | |
"community|alghafa:meta_ar_msa|0": 0, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": 0, | |
"community|alghafa:multiple_choice_sentiment_task|0": 0, | |
"community|arabic_exams|0": 0, | |
"community|arabic_mmlu:Accounting (University)|0": 0, | |
"community|arabic_mmlu:Arabic Language (General)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": 0, | |
"community|arabic_mmlu:Arabic Language (High School)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": 0, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": 0, | |
"community|arabic_mmlu:Biology (High School)|0": 0, | |
"community|arabic_mmlu:Civics (High School)|0": 0, | |
"community|arabic_mmlu:Civics (Middle School)|0": 0, | |
"community|arabic_mmlu:Computer Science (High School)|0": 0, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": 0, | |
"community|arabic_mmlu:Computer Science (University)|0": 0, | |
"community|arabic_mmlu:Driving Test|0": 0, | |
"community|arabic_mmlu:Economics (High School)|0": 0, | |
"community|arabic_mmlu:Economics (Middle School)|0": 0, | |
"community|arabic_mmlu:Economics (University)|0": 0, | |
"community|arabic_mmlu:General Knowledge|0": 0, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": 0, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": 0, | |
"community|arabic_mmlu:Geography (High School)|0": 0, | |
"community|arabic_mmlu:Geography (Middle School)|0": 0, | |
"community|arabic_mmlu:Geography (Primary School)|0": 0, | |
"community|arabic_mmlu:History (High School)|0": 0, | |
"community|arabic_mmlu:History (Middle School)|0": 0, | |
"community|arabic_mmlu:History (Primary School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies|0": 0, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": 0, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": 0, | |
"community|arabic_mmlu:Law (Professional)|0": 0, | |
"community|arabic_mmlu:Management (University)|0": 0, | |
"community|arabic_mmlu:Math (Primary School)|0": 0, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": 0, | |
"community|arabic_mmlu:Philosophy (High School)|0": 0, | |
"community|arabic_mmlu:Physics (High School)|0": 0, | |
"community|arabic_mmlu:Political Science (University)|0": 0, | |
"community|arabic_mmlu:Social Science (Middle School)|0": 0, | |
"community|arabic_mmlu:Social Science (Primary School)|0": 0, | |
"community|arabic_mmlu_ht:abstract_algebra|0": 0, | |
"community|arabic_mmlu_ht:anatomy|0": 0, | |
"community|arabic_mmlu_ht:astronomy|0": 0, | |
"community|arabic_mmlu_ht:business_ethics|0": 0, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": 0, | |
"community|arabic_mmlu_ht:college_biology|0": 0, | |
"community|arabic_mmlu_ht:college_chemistry|0": 0, | |
"community|arabic_mmlu_ht:college_computer_science|0": 0, | |
"community|arabic_mmlu_ht:college_mathematics|0": 0, | |
"community|arabic_mmlu_ht:college_medicine|0": 0, | |
"community|arabic_mmlu_ht:college_physics|0": 0, | |
"community|arabic_mmlu_ht:computer_security|0": 0, | |
"community|arabic_mmlu_ht:conceptual_physics|0": 0, | |
"community|arabic_mmlu_ht:econometrics|0": 0, | |
"community|arabic_mmlu_ht:electrical_engineering|0": 0, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": 0, | |
"community|arabic_mmlu_ht:formal_logic|0": 0, | |
"community|arabic_mmlu_ht:global_facts|0": 0, | |
"community|arabic_mmlu_ht:high_school_biology|0": 0, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": 0, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": 0, | |
"community|arabic_mmlu_ht:high_school_european_history|0": 0, | |
"community|arabic_mmlu_ht:high_school_geography|0": 0, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": 0, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": 0, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": 0, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": 0, | |
"community|arabic_mmlu_ht:high_school_physics|0": 0, | |
"community|arabic_mmlu_ht:high_school_psychology|0": 0, | |
"community|arabic_mmlu_ht:high_school_statistics|0": 0, | |
"community|arabic_mmlu_ht:high_school_us_history|0": 0, | |
"community|arabic_mmlu_ht:high_school_world_history|0": 0, | |
"community|arabic_mmlu_ht:human_aging|0": 0, | |
"community|arabic_mmlu_ht:human_sexuality|0": 0, | |
"community|arabic_mmlu_ht:international_law|0": 0, | |
"community|arabic_mmlu_ht:jurisprudence|0": 0, | |
"community|arabic_mmlu_ht:logical_fallacies|0": 0, | |
"community|arabic_mmlu_ht:machine_learning|0": 0, | |
"community|arabic_mmlu_ht:management|0": 0, | |
"community|arabic_mmlu_ht:marketing|0": 0, | |
"community|arabic_mmlu_ht:medical_genetics|0": 0, | |
"community|arabic_mmlu_ht:miscellaneous|0": 0, | |
"community|arabic_mmlu_ht:moral_disputes|0": 0, | |
"community|arabic_mmlu_ht:moral_scenarios|0": 0, | |
"community|arabic_mmlu_ht:nutrition|0": 0, | |
"community|arabic_mmlu_ht:philosophy|0": 0, | |
"community|arabic_mmlu_ht:prehistory|0": 0, | |
"community|arabic_mmlu_ht:professional_accounting|0": 0, | |
"community|arabic_mmlu_ht:professional_law|0": 0, | |
"community|arabic_mmlu_ht:professional_medicine|0": 0, | |
"community|arabic_mmlu_ht:professional_psychology|0": 0, | |
"community|arabic_mmlu_ht:public_relations|0": 0, | |
"community|arabic_mmlu_ht:security_studies|0": 0, | |
"community|arabic_mmlu_ht:sociology|0": 0, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": 0, | |
"community|arabic_mmlu_ht:virology|0": 0, | |
"community|arabic_mmlu_ht:world_religions|0": 0, | |
"community|aratrust:Ethics|0": 0, | |
"community|aratrust:Illegal|0": 0, | |
"community|aratrust:MentalHealth|0": 0, | |
"community|aratrust:Offensive|0": 0, | |
"community|aratrust:PhysicalHealth|0": 0, | |
"community|aratrust:Privacy|0": 0, | |
"community|aratrust:Trustfulness|0": 0, | |
"community|aratrust:Unfairness|0": 0, | |
"community|madinah_qa:Arabic Language (General)|0": 0, | |
"community|madinah_qa:Arabic Language (Grammar)|0": 0, | |
"community|alrage_qa|0": 0 | |
}, | |
"config_tasks": { | |
"community|alghafa:meta_ar_dialects": { | |
"name": "alghafa:meta_ar_dialects", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_dialects", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5395, | |
"effective_num_docs": 5395, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:meta_ar_msa": { | |
"name": "alghafa:meta_ar_msa", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "meta_ar_msa", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task": { | |
"name": "alghafa:multiple_choice_facts_truefalse_balanced_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_facts_truefalse_balanced_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_soqal_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_soqal_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": { | |
"name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 150, | |
"effective_num_docs": 150, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_no_neutral_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7995, | |
"effective_num_docs": 7995, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task": { | |
"name": "alghafa:multiple_choice_rating_sentiment_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_rating_sentiment_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 5995, | |
"effective_num_docs": 5995, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task": { | |
"name": "alghafa:multiple_choice_sentiment_task", | |
"prompt_function": "alghafa_pfn", | |
"hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native", | |
"hf_subset": "multiple_choice_sentiment_task", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1720, | |
"effective_num_docs": 1720, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_exams": { | |
"name": "arabic_exams", | |
"prompt_function": "arabic_exams_pfn", | |
"hf_repo": "OALL/Arabic_EXAMS", | |
"hf_subset": "default", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test", | |
"validation" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": "validation", | |
"few_shots_select": "sequential", | |
"generation_size": null, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 537, | |
"effective_num_docs": 537, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Accounting (University)": { | |
"name": "arabic_mmlu:Accounting (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Accounting (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 74, | |
"effective_num_docs": 74, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)": { | |
"name": "arabic_mmlu:Arabic Language (General)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (General)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)": { | |
"name": "arabic_mmlu:Arabic Language (Grammar)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Grammar)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 365, | |
"effective_num_docs": 365, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)": { | |
"name": "arabic_mmlu:Arabic Language (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 390, | |
"effective_num_docs": 390, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)": { | |
"name": "arabic_mmlu:Arabic Language (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 27, | |
"effective_num_docs": 27, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)": { | |
"name": "arabic_mmlu:Arabic Language (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Arabic Language (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 252, | |
"effective_num_docs": 252, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Biology (High School)": { | |
"name": "arabic_mmlu:Biology (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Biology (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1409, | |
"effective_num_docs": 1409, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Civics (High School)": { | |
"name": "arabic_mmlu:Civics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Civics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 87, | |
"effective_num_docs": 87, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)": { | |
"name": "arabic_mmlu:Civics (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Civics (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 236, | |
"effective_num_docs": 236, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)": { | |
"name": "arabic_mmlu:Computer Science (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 261, | |
"effective_num_docs": 261, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)": { | |
"name": "arabic_mmlu:Computer Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 27, | |
"effective_num_docs": 27, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)": { | |
"name": "arabic_mmlu:Computer Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 190, | |
"effective_num_docs": 190, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (University)": { | |
"name": "arabic_mmlu:Computer Science (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Computer Science (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 64, | |
"effective_num_docs": 64, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Driving Test": { | |
"name": "arabic_mmlu:Driving Test", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Driving Test", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1211, | |
"effective_num_docs": 1211, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (High School)": { | |
"name": "arabic_mmlu:Economics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 360, | |
"effective_num_docs": 360, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)": { | |
"name": "arabic_mmlu:Economics (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 87, | |
"effective_num_docs": 87, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Economics (University)": { | |
"name": "arabic_mmlu:Economics (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Economics (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 137, | |
"effective_num_docs": 137, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge": { | |
"name": "arabic_mmlu:General Knowledge", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 864, | |
"effective_num_docs": 864, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)": { | |
"name": "arabic_mmlu:General Knowledge (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 172, | |
"effective_num_docs": 172, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)": { | |
"name": "arabic_mmlu:General Knowledge (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "General Knowledge (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 162, | |
"effective_num_docs": 162, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (High School)": { | |
"name": "arabic_mmlu:Geography (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1038, | |
"effective_num_docs": 1038, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)": { | |
"name": "arabic_mmlu:Geography (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 272, | |
"effective_num_docs": 272, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)": { | |
"name": "arabic_mmlu:Geography (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Geography (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 57, | |
"effective_num_docs": 57, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (High School)": { | |
"name": "arabic_mmlu:History (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 760, | |
"effective_num_docs": 760, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (Middle School)": { | |
"name": "arabic_mmlu:History (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 203, | |
"effective_num_docs": 203, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:History (Primary School)": { | |
"name": "arabic_mmlu:History (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "History (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies": { | |
"name": "arabic_mmlu:Islamic Studies", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 639, | |
"effective_num_docs": 639, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)": { | |
"name": "arabic_mmlu:Islamic Studies (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 334, | |
"effective_num_docs": 334, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)": { | |
"name": "arabic_mmlu:Islamic Studies (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)": { | |
"name": "arabic_mmlu:Islamic Studies (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Islamic Studies (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 999, | |
"effective_num_docs": 999, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Law (Professional)": { | |
"name": "arabic_mmlu:Law (Professional)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Law (Professional)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 314, | |
"effective_num_docs": 314, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Management (University)": { | |
"name": "arabic_mmlu:Management (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Management (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 75, | |
"effective_num_docs": 75, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Math (Primary School)": { | |
"name": "arabic_mmlu:Math (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Math (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 409, | |
"effective_num_docs": 409, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)": { | |
"name": "arabic_mmlu:Natural Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Natural Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 242, | |
"effective_num_docs": 242, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)": { | |
"name": "arabic_mmlu:Natural Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Natural Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 336, | |
"effective_num_docs": 336, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)": { | |
"name": "arabic_mmlu:Philosophy (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Philosophy (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 39, | |
"effective_num_docs": 39, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Physics (High School)": { | |
"name": "arabic_mmlu:Physics (High School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Physics (High School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 255, | |
"effective_num_docs": 255, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Political Science (University)": { | |
"name": "arabic_mmlu:Political Science (University)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Political Science (University)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 210, | |
"effective_num_docs": 210, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)": { | |
"name": "arabic_mmlu:Social Science (Middle School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Social Science (Middle School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 241, | |
"effective_num_docs": 241, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)": { | |
"name": "arabic_mmlu:Social Science (Primary School)", | |
"prompt_function": "arabic_mmlu_pfn", | |
"hf_repo": "MBZUAI/ArabicMMLU", | |
"hf_subset": "Social Science (Primary School)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 705, | |
"effective_num_docs": 705, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra": { | |
"name": "arabic_mmlu_ht:abstract_algebra", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "abstract_algebra", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:anatomy": { | |
"name": "arabic_mmlu_ht:anatomy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "anatomy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 135, | |
"effective_num_docs": 135, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:astronomy": { | |
"name": "arabic_mmlu_ht:astronomy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "astronomy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 152, | |
"effective_num_docs": 152, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:business_ethics": { | |
"name": "arabic_mmlu_ht:business_ethics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "business_ethics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge": { | |
"name": "arabic_mmlu_ht:clinical_knowledge", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "clinical_knowledge", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 265, | |
"effective_num_docs": 265, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_biology": { | |
"name": "arabic_mmlu_ht:college_biology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_biology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 144, | |
"effective_num_docs": 144, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry": { | |
"name": "arabic_mmlu_ht:college_chemistry", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_chemistry", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science": { | |
"name": "arabic_mmlu_ht:college_computer_science", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_computer_science", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics": { | |
"name": "arabic_mmlu_ht:college_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_medicine": { | |
"name": "arabic_mmlu_ht:college_medicine", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_medicine", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 173, | |
"effective_num_docs": 173, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:college_physics": { | |
"name": "arabic_mmlu_ht:college_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "college_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 102, | |
"effective_num_docs": 102, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:computer_security": { | |
"name": "arabic_mmlu_ht:computer_security", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "computer_security", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics": { | |
"name": "arabic_mmlu_ht:conceptual_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "conceptual_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:econometrics": { | |
"name": "arabic_mmlu_ht:econometrics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "econometrics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 114, | |
"effective_num_docs": 114, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering": { | |
"name": "arabic_mmlu_ht:electrical_engineering", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "electrical_engineering", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 145, | |
"effective_num_docs": 145, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics": { | |
"name": "arabic_mmlu_ht:elementary_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "elementary_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 378, | |
"effective_num_docs": 378, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:formal_logic": { | |
"name": "arabic_mmlu_ht:formal_logic", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "formal_logic", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 126, | |
"effective_num_docs": 126, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:global_facts": { | |
"name": "arabic_mmlu_ht:global_facts", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "global_facts", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology": { | |
"name": "arabic_mmlu_ht:high_school_biology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_biology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3813, | |
"effective_num_docs": 3813, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry": { | |
"name": "arabic_mmlu_ht:high_school_chemistry", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_chemistry", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4016, | |
"effective_num_docs": 4016, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science": { | |
"name": "arabic_mmlu_ht:high_school_computer_science", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_computer_science", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history": { | |
"name": "arabic_mmlu_ht:high_school_european_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_european_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 8152, | |
"effective_num_docs": 8152, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography": { | |
"name": "arabic_mmlu_ht:high_school_geography", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_geography", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 198, | |
"effective_num_docs": 198, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics": { | |
"name": "arabic_mmlu_ht:high_school_government_and_politics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_government_and_politics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 193, | |
"effective_num_docs": 193, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics": { | |
"name": "arabic_mmlu_ht:high_school_macroeconomics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_macroeconomics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2891, | |
"effective_num_docs": 2891, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics": { | |
"name": "arabic_mmlu_ht:high_school_mathematics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_mathematics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 270, | |
"effective_num_docs": 270, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics": { | |
"name": "arabic_mmlu_ht:high_school_microeconomics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_microeconomics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 238, | |
"effective_num_docs": 238, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics": { | |
"name": "arabic_mmlu_ht:high_school_physics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_physics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 151, | |
"effective_num_docs": 151, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology": { | |
"name": "arabic_mmlu_ht:high_school_psychology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_psychology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 545, | |
"effective_num_docs": 545, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics": { | |
"name": "arabic_mmlu_ht:high_school_statistics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_statistics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4232, | |
"effective_num_docs": 4232, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history": { | |
"name": "arabic_mmlu_ht:high_school_us_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_us_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 204, | |
"effective_num_docs": 204, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history": { | |
"name": "arabic_mmlu_ht:high_school_world_history", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "high_school_world_history", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 237, | |
"effective_num_docs": 237, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:human_aging": { | |
"name": "arabic_mmlu_ht:human_aging", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "human_aging", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 223, | |
"effective_num_docs": 223, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality": { | |
"name": "arabic_mmlu_ht:human_sexuality", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "human_sexuality", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 131, | |
"effective_num_docs": 131, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:international_law": { | |
"name": "arabic_mmlu_ht:international_law", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "international_law", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 121, | |
"effective_num_docs": 121, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence": { | |
"name": "arabic_mmlu_ht:jurisprudence", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "jurisprudence", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 108, | |
"effective_num_docs": 108, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies": { | |
"name": "arabic_mmlu_ht:logical_fallacies", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "logical_fallacies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 163, | |
"effective_num_docs": 163, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:machine_learning": { | |
"name": "arabic_mmlu_ht:machine_learning", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "machine_learning", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 112, | |
"effective_num_docs": 112, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:management": { | |
"name": "arabic_mmlu_ht:management", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "management", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 103, | |
"effective_num_docs": 103, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:marketing": { | |
"name": "arabic_mmlu_ht:marketing", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "marketing", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 234, | |
"effective_num_docs": 234, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics": { | |
"name": "arabic_mmlu_ht:medical_genetics", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "medical_genetics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous": { | |
"name": "arabic_mmlu_ht:miscellaneous", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "miscellaneous", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2420, | |
"effective_num_docs": 2420, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes": { | |
"name": "arabic_mmlu_ht:moral_disputes", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "moral_disputes", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 346, | |
"effective_num_docs": 346, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios": { | |
"name": "arabic_mmlu_ht:moral_scenarios", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "moral_scenarios", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 895, | |
"effective_num_docs": 895, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:nutrition": { | |
"name": "arabic_mmlu_ht:nutrition", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "nutrition", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:philosophy": { | |
"name": "arabic_mmlu_ht:philosophy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "philosophy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 311, | |
"effective_num_docs": 311, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:prehistory": { | |
"name": "arabic_mmlu_ht:prehistory", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "prehistory", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 324, | |
"effective_num_docs": 324, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting": { | |
"name": "arabic_mmlu_ht:professional_accounting", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_accounting", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 4514, | |
"effective_num_docs": 4514, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_law": { | |
"name": "arabic_mmlu_ht:professional_law", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_law", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 7987, | |
"effective_num_docs": 7987, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine": { | |
"name": "arabic_mmlu_ht:professional_medicine", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_medicine", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 1637, | |
"effective_num_docs": 1637, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology": { | |
"name": "arabic_mmlu_ht:professional_psychology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "professional_psychology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 3503, | |
"effective_num_docs": 3503, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:public_relations": { | |
"name": "arabic_mmlu_ht:public_relations", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "public_relations", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 110, | |
"effective_num_docs": 110, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:security_studies": { | |
"name": "arabic_mmlu_ht:security_studies", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "security_studies", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 245, | |
"effective_num_docs": 245, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:sociology": { | |
"name": "arabic_mmlu_ht:sociology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "sociology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 201, | |
"effective_num_docs": 201, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy": { | |
"name": "arabic_mmlu_ht:us_foreign_policy", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "us_foreign_policy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 100, | |
"effective_num_docs": 100, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:virology": { | |
"name": "arabic_mmlu_ht:virology", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "virology", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 166, | |
"effective_num_docs": 166, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|arabic_mmlu_ht:world_religions": { | |
"name": "arabic_mmlu_ht:world_religions", | |
"prompt_function": "arabic_mmlu_ht_pfn", | |
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", | |
"hf_subset": "world_religions", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 171, | |
"effective_num_docs": 171, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Ethics": { | |
"name": "aratrust:Ethics", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Ethics", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 60, | |
"effective_num_docs": 60, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Illegal": { | |
"name": "aratrust:Illegal", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Illegal", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 53, | |
"effective_num_docs": 53, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:MentalHealth": { | |
"name": "aratrust:MentalHealth", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "MentalHealth", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 76, | |
"effective_num_docs": 76, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Offensive": { | |
"name": "aratrust:Offensive", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Offensive", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 69, | |
"effective_num_docs": 69, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:PhysicalHealth": { | |
"name": "aratrust:PhysicalHealth", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "PhysicalHealth", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 73, | |
"effective_num_docs": 73, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Privacy": { | |
"name": "aratrust:Privacy", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Privacy", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 57, | |
"effective_num_docs": 57, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Trustfulness": { | |
"name": "aratrust:Trustfulness", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Trustfulness", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 78, | |
"effective_num_docs": 78, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|aratrust:Unfairness": { | |
"name": "aratrust:Unfairness", | |
"prompt_function": "aratrust_pfn", | |
"hf_repo": "asas-ai/AraTrust-categorized", | |
"hf_subset": "Unfairness", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 55, | |
"effective_num_docs": 55, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|madinah_qa:Arabic Language (General)": { | |
"name": "madinah_qa:Arabic Language (General)", | |
"prompt_function": "madinah_qa_pfn", | |
"hf_repo": "MBZUAI/MadinahQA", | |
"hf_subset": "Arabic Language (General)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 612, | |
"effective_num_docs": 612, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)": { | |
"name": "madinah_qa:Arabic Language (Grammar)", | |
"prompt_function": "madinah_qa_pfn", | |
"hf_repo": "MBZUAI/MadinahQA", | |
"hf_subset": "Arabic Language (Grammar)", | |
"metric": [ | |
{ | |
"metric_name": "acc_norm", | |
"higher_is_better": true, | |
"category": "8", | |
"use_case": "1", | |
"sample_level_fn": "compute", | |
"corpus_level_fn": "mean" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"test" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": [ | |
"dev" | |
], | |
"few_shots_select": "sequential", | |
"generation_size": -1, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 365, | |
"effective_num_docs": 365, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
}, | |
"community|alrage_qa": { | |
"name": "alrage_qa", | |
"prompt_function": "qa_prompt_arabic", | |
"hf_repo": "OALL/ALRAGE", | |
"hf_subset": null, | |
"metric": [ | |
{ | |
"metric_name": "llm_as_judge", | |
"higher_is_better": true, | |
"category": "7", | |
"use_case": "10", | |
"sample_level_fn": "_sample_level_fn", | |
"corpus_level_fn": "aggregate_scores" | |
} | |
], | |
"hf_revision": null, | |
"hf_filter": null, | |
"hf_avail_splits": [ | |
"train" | |
], | |
"trust_dataset": true, | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 200, | |
"generation_grammar": null, | |
"stop_sequence": [], | |
"num_samples": null, | |
"suite": [ | |
"community" | |
], | |
"original_num_docs": 2106, | |
"effective_num_docs": 2106, | |
"must_remove_duplicate_docs": false, | |
"version": 0 | |
} | |
}, | |
"summary_tasks": { | |
"community|alghafa:meta_ar_dialects|0": { | |
"hashes": { | |
"hash_examples": "c0b6081f83e14064", | |
"hash_full_prompts": "abd57794842153f6", | |
"hash_input_tokens": "444c7ed35a4e3462", | |
"hash_cont_tokens": "0787d6e25363632e" | |
}, | |
"truncated": 0, | |
"non_truncated": 5395, | |
"padded": 21519, | |
"non_padded": 61, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:meta_ar_msa|0": { | |
"hashes": { | |
"hash_examples": "64eb78a7c5b7484b", | |
"hash_full_prompts": "26901ec5bd974a59", | |
"hash_input_tokens": "5841dd7c4be18019", | |
"hash_cont_tokens": "9419419f52941664" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3552, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { | |
"hashes": { | |
"hash_examples": "54fc3502c1c02c06", | |
"hash_full_prompts": "fc42cc036fc4fd00", | |
"hash_input_tokens": "e095d5f4cf2c60e5", | |
"hash_cont_tokens": "6b8a184674b81d9e" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 150, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { | |
"hashes": { | |
"hash_examples": "46572d83696552ae", | |
"hash_full_prompts": "e087a1b58bdf75b4", | |
"hash_input_tokens": "d226b976ca929e8e", | |
"hash_cont_tokens": "081e673a9b5034d1" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 750, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { | |
"hashes": { | |
"hash_examples": "f430d97ff715bc1c", | |
"hash_full_prompts": "0756916da346d3f7", | |
"hash_input_tokens": "836e7f414e9e45a6", | |
"hash_cont_tokens": "081e673a9b5034d1" | |
}, | |
"truncated": 0, | |
"non_truncated": 150, | |
"padded": 740, | |
"non_padded": 10, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { | |
"hashes": { | |
"hash_examples": "6b70a7416584f98c", | |
"hash_full_prompts": "7977035c80bcdaef", | |
"hash_input_tokens": "daa42905c523ea5d", | |
"hash_cont_tokens": "fbec465fdd3933cd" | |
}, | |
"truncated": 0, | |
"non_truncated": 7995, | |
"padded": 15656, | |
"non_padded": 334, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_rating_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "bc2005cc9d2f436e", | |
"hash_full_prompts": "63e49698cce690eb", | |
"hash_input_tokens": "08ed78e4291608cc", | |
"hash_cont_tokens": "4a2b3f8ddebfb48d" | |
}, | |
"truncated": 0, | |
"non_truncated": 5995, | |
"padded": 17296, | |
"non_padded": 689, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alghafa:multiple_choice_sentiment_task|0": { | |
"hashes": { | |
"hash_examples": "6fb0e254ea5945d8", | |
"hash_full_prompts": "7d6546965445f4d2", | |
"hash_input_tokens": "f629b5a2c7275499", | |
"hash_cont_tokens": "edaa2e057c5deb01" | |
}, | |
"truncated": 0, | |
"non_truncated": 1720, | |
"padded": 4962, | |
"non_padded": 198, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_exams|0": { | |
"hashes": { | |
"hash_examples": "6d721df351722656", | |
"hash_full_prompts": "499216b4a313b9df", | |
"hash_input_tokens": "e12ad71a7221c658", | |
"hash_cont_tokens": "a4ef65bc7bab8dcf" | |
}, | |
"truncated": 0, | |
"non_truncated": 537, | |
"padded": 2096, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Accounting (University)|0": { | |
"hashes": { | |
"hash_examples": "30e09697562ff9e7", | |
"hash_full_prompts": "def6440eb1ab52e8", | |
"hash_input_tokens": "24ba91162c0c9f96", | |
"hash_cont_tokens": "587bf4caea1658f4" | |
}, | |
"truncated": 0, | |
"non_truncated": 74, | |
"padded": 256, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (General)|0": { | |
"hashes": { | |
"hash_examples": "bef69fb8b3b75f28", | |
"hash_full_prompts": "fc03d402a08bc0d8", | |
"hash_input_tokens": "f671d300f9135224", | |
"hash_cont_tokens": "95a234c727b7b43c" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2400, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Grammar)|0": { | |
"hashes": { | |
"hash_examples": "bd066a9e6a140a4b", | |
"hash_full_prompts": "ac0e5261635a0e26", | |
"hash_input_tokens": "6fcffc316bc8783a", | |
"hash_cont_tokens": "98f874e7446c544b" | |
}, | |
"truncated": 0, | |
"non_truncated": 365, | |
"padded": 1545, | |
"non_padded": 43, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (High School)|0": { | |
"hashes": { | |
"hash_examples": "a9c2cd9a9929292a", | |
"hash_full_prompts": "25fd0632975c5177", | |
"hash_input_tokens": "e0f16e5cb1554827", | |
"hash_cont_tokens": "3d676d0d2f081e05" | |
}, | |
"truncated": 0, | |
"non_truncated": 390, | |
"padded": 1505, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "2f8a77bbbd0e21ff", | |
"hash_full_prompts": "15573ec49f03e77e", | |
"hash_input_tokens": "f67ccfcd7005593b", | |
"hash_cont_tokens": "322ea7667dfc2c2d" | |
}, | |
"truncated": 0, | |
"non_truncated": 27, | |
"padded": 105, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Arabic Language (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "5eed3da47822539b", | |
"hash_full_prompts": "c4699031f150a5fa", | |
"hash_input_tokens": "27fd8ff84aa382c7", | |
"hash_cont_tokens": "f3c78f80ddea1519" | |
}, | |
"truncated": 0, | |
"non_truncated": 252, | |
"padded": 918, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Biology (High School)|0": { | |
"hashes": { | |
"hash_examples": "91ae6d22a0f0213d", | |
"hash_full_prompts": "8e8334c7ba8ed60f", | |
"hash_input_tokens": "23035e39aeca68d6", | |
"hash_cont_tokens": "aaa20fdc3c06d2c3" | |
}, | |
"truncated": 0, | |
"non_truncated": 1409, | |
"padded": 4968, | |
"non_padded": 88, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Civics (High School)|0": { | |
"hashes": { | |
"hash_examples": "f27bf8791bea2bb9", | |
"hash_full_prompts": "d9ecde3c28f611fe", | |
"hash_input_tokens": "9257770457808792", | |
"hash_cont_tokens": "e02c7ebfec7f8df8" | |
}, | |
"truncated": 0, | |
"non_truncated": 87, | |
"padded": 312, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Civics (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "74f5bb0098c8916f", | |
"hash_full_prompts": "f4f2ab6438206e9c", | |
"hash_input_tokens": "a436d5ab8a0bd9bd", | |
"hash_cont_tokens": "1ffdd9a463183bfa" | |
}, | |
"truncated": 0, | |
"non_truncated": 236, | |
"padded": 940, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (High School)|0": { | |
"hashes": { | |
"hash_examples": "a4278d7b525d46fe", | |
"hash_full_prompts": "5912af94560b0be8", | |
"hash_input_tokens": "d82c24bdc123d6dc", | |
"hash_cont_tokens": "821feca3d9004c98" | |
}, | |
"truncated": 0, | |
"non_truncated": 261, | |
"padded": 994, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "0cb6c07e4b80dfd4", | |
"hash_full_prompts": "12a98150ceb26734", | |
"hash_input_tokens": "6a56579a6db91432", | |
"hash_cont_tokens": "8b4f299b6f012a83" | |
}, | |
"truncated": 0, | |
"non_truncated": 27, | |
"padded": 100, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "d96fc1bc32473533", | |
"hash_full_prompts": "bc8d2e62e0e2bdef", | |
"hash_input_tokens": "5786e690e0732ff6", | |
"hash_cont_tokens": "1bc67f97b48b9ece" | |
}, | |
"truncated": 0, | |
"non_truncated": 190, | |
"padded": 476, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Computer Science (University)|0": { | |
"hashes": { | |
"hash_examples": "8835587e436cbaff", | |
"hash_full_prompts": "d62b0c234ff147cc", | |
"hash_input_tokens": "9bb3d76212e33e0d", | |
"hash_cont_tokens": "e9d871459bc85f62" | |
}, | |
"truncated": 0, | |
"non_truncated": 64, | |
"padded": 247, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Driving Test|0": { | |
"hashes": { | |
"hash_examples": "7a4c38a2c451d075", | |
"hash_full_prompts": "213e7de98e4730a0", | |
"hash_input_tokens": "31c8b955b2808793", | |
"hash_cont_tokens": "cd411982b0f12d43" | |
}, | |
"truncated": 0, | |
"non_truncated": 1211, | |
"padded": 3606, | |
"non_padded": 79, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (High School)|0": { | |
"hashes": { | |
"hash_examples": "c04c252836601279", | |
"hash_full_prompts": "8af6b4d8c6e96660", | |
"hash_input_tokens": "a394466d30e9ccad", | |
"hash_cont_tokens": "4bda66df90f2d4d8" | |
}, | |
"truncated": 0, | |
"non_truncated": 360, | |
"padded": 1374, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "18fba1579406b3cc", | |
"hash_full_prompts": "6f64a10c95546611", | |
"hash_input_tokens": "0016e1389c447efd", | |
"hash_cont_tokens": "3ea283b0f50a72f5" | |
}, | |
"truncated": 0, | |
"non_truncated": 87, | |
"padded": 344, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Economics (University)|0": { | |
"hashes": { | |
"hash_examples": "7c9e86fba8151562", | |
"hash_full_prompts": "9db5197a57858c56", | |
"hash_input_tokens": "cfc7923ae4860f06", | |
"hash_cont_tokens": "91cdb256248a5bdf" | |
}, | |
"truncated": 0, | |
"non_truncated": 137, | |
"padded": 532, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge|0": { | |
"hashes": { | |
"hash_examples": "acfbe4e1f0314b85", | |
"hash_full_prompts": "d2eade89398b1e10", | |
"hash_input_tokens": "27cc716b484aed90", | |
"hash_cont_tokens": "76d704fbedbe5ab8" | |
}, | |
"truncated": 0, | |
"non_truncated": 864, | |
"padded": 3169, | |
"non_padded": 44, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "03cd0ecf10224316", | |
"hash_full_prompts": "5cc9f89abeb8e9c9", | |
"hash_input_tokens": "4e8778e8fd57b61e", | |
"hash_cont_tokens": "aff2aed9268be2e2" | |
}, | |
"truncated": 0, | |
"non_truncated": 172, | |
"padded": 607, | |
"non_padded": 21, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:General Knowledge (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "c3ee30196e05e122", | |
"hash_full_prompts": "78f2ee5265df84d8", | |
"hash_input_tokens": "06b5241d7706e0b8", | |
"hash_cont_tokens": "6c8978669cdc11fb" | |
}, | |
"truncated": 0, | |
"non_truncated": 162, | |
"padded": 629, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (High School)|0": { | |
"hashes": { | |
"hash_examples": "e2e329d2bdd9fb7b", | |
"hash_full_prompts": "d0399f602dd9aa0d", | |
"hash_input_tokens": "8cf1c8dc75c6b202", | |
"hash_cont_tokens": "37e2e9c548d8c904" | |
}, | |
"truncated": 0, | |
"non_truncated": 1038, | |
"padded": 4052, | |
"non_padded": 64, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "420b161444291989", | |
"hash_full_prompts": "5cffacb66cf662dc", | |
"hash_input_tokens": "bd8c3b7905fbb441", | |
"hash_cont_tokens": "5e24bb4c8be23901" | |
}, | |
"truncated": 0, | |
"non_truncated": 272, | |
"padded": 966, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Geography (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "5bc5ca48a4210899", | |
"hash_full_prompts": "6375dc7360352e84", | |
"hash_input_tokens": "8213140a7ec6ec1f", | |
"hash_cont_tokens": "b9f45957a97d1ecf" | |
}, | |
"truncated": 0, | |
"non_truncated": 57, | |
"padded": 216, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (High School)|0": { | |
"hashes": { | |
"hash_examples": "c7cc37f29311bea1", | |
"hash_full_prompts": "0cd18df8918b6810", | |
"hash_input_tokens": "7267cc5457822baa", | |
"hash_cont_tokens": "b16e65544485acae" | |
}, | |
"truncated": 0, | |
"non_truncated": 760, | |
"padded": 2886, | |
"non_padded": 76, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "5b9f1973337153a2", | |
"hash_full_prompts": "57dd1cbbb8d81b12", | |
"hash_input_tokens": "cc288fa912516a3b", | |
"hash_cont_tokens": "e3b355a58a286ee4" | |
}, | |
"truncated": 0, | |
"non_truncated": 203, | |
"padded": 734, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:History (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "af2469847007c1fe", | |
"hash_full_prompts": "e4553d4f8b5ce91e", | |
"hash_input_tokens": "caea3435c5cc7a5a", | |
"hash_cont_tokens": "a52a22630c3cb3f7" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies|0": { | |
"hashes": { | |
"hash_examples": "c8da9b2f16a5ea0f", | |
"hash_full_prompts": "09e27682dc0ead0e", | |
"hash_input_tokens": "975752785ecac4a3", | |
"hash_cont_tokens": "1866597a67ff4424" | |
}, | |
"truncated": 0, | |
"non_truncated": 639, | |
"padded": 2493, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (High School)|0": { | |
"hashes": { | |
"hash_examples": "efb11bc8ef398117", | |
"hash_full_prompts": "fe752769668c1009", | |
"hash_input_tokens": "02f0b6cb21235e3b", | |
"hash_cont_tokens": "6b678abb2fd451bd" | |
}, | |
"truncated": 0, | |
"non_truncated": 334, | |
"padded": 1281, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "9e33ab030eebdb99", | |
"hash_full_prompts": "77f65dc3dd8a829d", | |
"hash_input_tokens": "19f9a657cbe0859f", | |
"hash_cont_tokens": "e0c922e595ad51cd" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 867, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "4167565d878b20eb", | |
"hash_full_prompts": "03839fe4e2cac919", | |
"hash_input_tokens": "0bca918afc8789e1", | |
"hash_cont_tokens": "97e2182a63c5686c" | |
}, | |
"truncated": 0, | |
"non_truncated": 999, | |
"padded": 2969, | |
"non_padded": 55, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Law (Professional)|0": { | |
"hashes": { | |
"hash_examples": "e77f52c8fe4352b3", | |
"hash_full_prompts": "bf6d7dd46da99ec9", | |
"hash_input_tokens": "6ff740a4b5e5fd62", | |
"hash_cont_tokens": "324cc46c561b417c" | |
}, | |
"truncated": 0, | |
"non_truncated": 314, | |
"padded": 1223, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Management (University)|0": { | |
"hashes": { | |
"hash_examples": "09682649b04b7327", | |
"hash_full_prompts": "b8f8378cbb8858ac", | |
"hash_input_tokens": "b0ef3d58e896d82a", | |
"hash_cont_tokens": "1e98e1e2cd19a5e3" | |
}, | |
"truncated": 0, | |
"non_truncated": 75, | |
"padded": 200, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Math (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "edb027bfae7e76f1", | |
"hash_full_prompts": "6fa9ed8864bb5260", | |
"hash_input_tokens": "f6bd3806b5e36a3a", | |
"hash_cont_tokens": "632401a080490684" | |
}, | |
"truncated": 0, | |
"non_truncated": 409, | |
"padded": 1290, | |
"non_padded": 6, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "96e72c9094c2364c", | |
"hash_full_prompts": "43d67a3752a4e748", | |
"hash_input_tokens": "aced22b7e2121718", | |
"hash_cont_tokens": "17e42af5dbb9eee1" | |
}, | |
"truncated": 0, | |
"non_truncated": 242, | |
"padded": 924, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Natural Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "69e35bad3dec5a4d", | |
"hash_full_prompts": "b97876d536b57b21", | |
"hash_input_tokens": "910c69ba2bf146a0", | |
"hash_cont_tokens": "a7423721c9837336" | |
}, | |
"truncated": 0, | |
"non_truncated": 336, | |
"padded": 1206, | |
"non_padded": 22, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Philosophy (High School)|0": { | |
"hashes": { | |
"hash_examples": "dc6ebd484a02fca5", | |
"hash_full_prompts": "db670d86ac726721", | |
"hash_input_tokens": "21eea9729777fc7e", | |
"hash_cont_tokens": "69b31fc6977897bf" | |
}, | |
"truncated": 0, | |
"non_truncated": 39, | |
"padded": 156, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Physics (High School)|0": { | |
"hashes": { | |
"hash_examples": "58a1722472c9e644", | |
"hash_full_prompts": "3c2cb36f55a8a961", | |
"hash_input_tokens": "308bb33e988f4cdd", | |
"hash_cont_tokens": "f9506aa86f66954d" | |
}, | |
"truncated": 0, | |
"non_truncated": 255, | |
"padded": 996, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Political Science (University)|0": { | |
"hashes": { | |
"hash_examples": "07a4ed6aabbdfd1e", | |
"hash_full_prompts": "f91c571b1a302600", | |
"hash_input_tokens": "de5787162e4a9890", | |
"hash_cont_tokens": "4799b66f49438465" | |
}, | |
"truncated": 0, | |
"non_truncated": 210, | |
"padded": 688, | |
"non_padded": 22, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Middle School)|0": { | |
"hashes": { | |
"hash_examples": "8ca955902f304664", | |
"hash_full_prompts": "29dbd29e6fe4f744", | |
"hash_input_tokens": "75708d5cc4092063", | |
"hash_cont_tokens": "4602cb88db99312d" | |
}, | |
"truncated": 0, | |
"non_truncated": 241, | |
"padded": 919, | |
"non_padded": 10, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu:Social Science (Primary School)|0": { | |
"hashes": { | |
"hash_examples": "934025ab3738123c", | |
"hash_full_prompts": "8acda215da105f16", | |
"hash_input_tokens": "c6a09e79cc98ea8a", | |
"hash_cont_tokens": "19e973e9f05c9c82" | |
}, | |
"truncated": 0, | |
"non_truncated": 705, | |
"padded": 2004, | |
"non_padded": 39, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:abstract_algebra|0": { | |
"hashes": { | |
"hash_examples": "0b557911f2f6d919", | |
"hash_full_prompts": "4c8314ebe7adc209", | |
"hash_input_tokens": "03ca854183d82a17", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:anatomy|0": { | |
"hashes": { | |
"hash_examples": "a552d8a0ef294061", | |
"hash_full_prompts": "792190fca7bb54f9", | |
"hash_input_tokens": "dc6e5d383cd9600e", | |
"hash_cont_tokens": "96c000fa61c3bd55" | |
}, | |
"truncated": 0, | |
"non_truncated": 135, | |
"padded": 532, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:astronomy|0": { | |
"hashes": { | |
"hash_examples": "c4a372d0af7da098", | |
"hash_full_prompts": "18697b6ec79d251b", | |
"hash_input_tokens": "e7eb75e9590a4f37", | |
"hash_cont_tokens": "b13cc32205751d90" | |
}, | |
"truncated": 0, | |
"non_truncated": 152, | |
"padded": 604, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:business_ethics|0": { | |
"hashes": { | |
"hash_examples": "9f71d816abf8af7a", | |
"hash_full_prompts": "cdf39362e7a4c282", | |
"hash_input_tokens": "ec2c06d40ccbfd5d", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:clinical_knowledge|0": { | |
"hashes": { | |
"hash_examples": "38303cd765589ef3", | |
"hash_full_prompts": "4916d5b3e6fa5747", | |
"hash_input_tokens": "7f014e95fc56731c", | |
"hash_cont_tokens": "c771582839d4f30c" | |
}, | |
"truncated": 0, | |
"non_truncated": 265, | |
"padded": 1028, | |
"non_padded": 32, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_biology|0": { | |
"hashes": { | |
"hash_examples": "dbd9b5d318e60b04", | |
"hash_full_prompts": "25af6800e1994aca", | |
"hash_input_tokens": "91e053330e1d0ca0", | |
"hash_cont_tokens": "ec774ac0d0ad658b" | |
}, | |
"truncated": 0, | |
"non_truncated": 144, | |
"padded": 572, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_chemistry|0": { | |
"hashes": { | |
"hash_examples": "6f88491d03db8a4c", | |
"hash_full_prompts": "6e44d3469e37e2e1", | |
"hash_input_tokens": "4c81c2ec5e64aba6", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_computer_science|0": { | |
"hashes": { | |
"hash_examples": "ebfdee5ef2ed5e17", | |
"hash_full_prompts": "3e307548735b2920", | |
"hash_input_tokens": "66ff25e7cccdf433", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_mathematics|0": { | |
"hashes": { | |
"hash_examples": "e3f22cd7712aae2f", | |
"hash_full_prompts": "0c2b29e8eb73a7f3", | |
"hash_input_tokens": "67c170584808d053", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_medicine|0": { | |
"hashes": { | |
"hash_examples": "51a5501373afb5a7", | |
"hash_full_prompts": "2593c8e3e7370b8f", | |
"hash_input_tokens": "4236849af4c01bc7", | |
"hash_cont_tokens": "1823a754e6394181" | |
}, | |
"truncated": 0, | |
"non_truncated": 173, | |
"padded": 680, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:college_physics|0": { | |
"hashes": { | |
"hash_examples": "2d3e015989b108db", | |
"hash_full_prompts": "501db5ec44eddf6f", | |
"hash_input_tokens": "7a1e669f26978789", | |
"hash_cont_tokens": "ee5dc873d27b9e10" | |
}, | |
"truncated": 0, | |
"non_truncated": 102, | |
"padded": 404, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:computer_security|0": { | |
"hashes": { | |
"hash_examples": "f8810eddc38dfee4", | |
"hash_full_prompts": "5dc1461b506ec945", | |
"hash_input_tokens": "fb58215b0c2a5a60", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 396, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:conceptual_physics|0": { | |
"hashes": { | |
"hash_examples": "211e32cc43c6b1dc", | |
"hash_full_prompts": "7a6a81372e205ce8", | |
"hash_input_tokens": "d8d938ed9a463457", | |
"hash_cont_tokens": "b7b580bbcf7e0afa" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 896, | |
"non_padded": 44, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:econometrics|0": { | |
"hashes": { | |
"hash_examples": "810023786b2484d2", | |
"hash_full_prompts": "dc53863784bf74c0", | |
"hash_input_tokens": "65a2dfeef3d77a7b", | |
"hash_cont_tokens": "d44932b2a931e093" | |
}, | |
"truncated": 0, | |
"non_truncated": 114, | |
"padded": 452, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:electrical_engineering|0": { | |
"hashes": { | |
"hash_examples": "a222760c93eaa1ee", | |
"hash_full_prompts": "c41ebc8121330b1f", | |
"hash_input_tokens": "9739ba5f787de1d2", | |
"hash_cont_tokens": "159f4cb1232d2a3c" | |
}, | |
"truncated": 0, | |
"non_truncated": 145, | |
"padded": 544, | |
"non_padded": 36, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:elementary_mathematics|0": { | |
"hashes": { | |
"hash_examples": "4c069aeee64dc227", | |
"hash_full_prompts": "0d9069fce1449bad", | |
"hash_input_tokens": "3bae1815c23ee105", | |
"hash_cont_tokens": "2bf44b70baf49dfa" | |
}, | |
"truncated": 0, | |
"non_truncated": 378, | |
"padded": 1500, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:formal_logic|0": { | |
"hashes": { | |
"hash_examples": "3cb0ccbf8e8a77ae", | |
"hash_full_prompts": "a58ff93a7a58c7cd", | |
"hash_input_tokens": "9f671e83d0d48ad6", | |
"hash_cont_tokens": "8765c45f4711ebb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 126, | |
"padded": 504, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:global_facts|0": { | |
"hashes": { | |
"hash_examples": "c1d039e64ea321b9", | |
"hash_full_prompts": "107d6c15bef76db1", | |
"hash_input_tokens": "0d77fd8fdfbe471c", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 388, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_biology|0": { | |
"hashes": { | |
"hash_examples": "ddcb8237bb4ba08a", | |
"hash_full_prompts": "1771dde0eb173d3e", | |
"hash_input_tokens": "04651b18a5a09da4", | |
"hash_cont_tokens": "49908817551a4513" | |
}, | |
"truncated": 0, | |
"non_truncated": 3813, | |
"padded": 15100, | |
"non_padded": 152, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_chemistry|0": { | |
"hashes": { | |
"hash_examples": "07061b55c5c436d9", | |
"hash_full_prompts": "eef1a22cbdff26f0", | |
"hash_input_tokens": "9ae75582bb728c7b", | |
"hash_cont_tokens": "a7f16a586e1cfe0f" | |
}, | |
"truncated": 0, | |
"non_truncated": 4016, | |
"padded": 15912, | |
"non_padded": 152, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_computer_science|0": { | |
"hashes": { | |
"hash_examples": "8d3405483d5fdcff", | |
"hash_full_prompts": "c7938870e5a97c73", | |
"hash_input_tokens": "f5583292a62accae", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 400, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_european_history|0": { | |
"hashes": { | |
"hash_examples": "031c49a430356414", | |
"hash_full_prompts": "ee7f14051468ed55", | |
"hash_input_tokens": "aebcd7428edf4a26", | |
"hash_cont_tokens": "5420388845898571" | |
}, | |
"truncated": 0, | |
"non_truncated": 8152, | |
"padded": 32448, | |
"non_padded": 160, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_geography|0": { | |
"hashes": { | |
"hash_examples": "d0ce2b019a66c1de", | |
"hash_full_prompts": "cffbd5df701c4a53", | |
"hash_input_tokens": "277bcd137b6e5a21", | |
"hash_cont_tokens": "fa4a2c8384dfaaa5" | |
}, | |
"truncated": 0, | |
"non_truncated": 198, | |
"padded": 768, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { | |
"hashes": { | |
"hash_examples": "7d7c6d476d0576b1", | |
"hash_full_prompts": "d25143524dfae79e", | |
"hash_input_tokens": "bc8ae01e68c30756", | |
"hash_cont_tokens": "682709d2fa91c75e" | |
}, | |
"truncated": 0, | |
"non_truncated": 193, | |
"padded": 768, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { | |
"hashes": { | |
"hash_examples": "694d3a01c6144ddb", | |
"hash_full_prompts": "5b1176d6f310caaf", | |
"hash_input_tokens": "04f8810c16f54c52", | |
"hash_cont_tokens": "4f2f97c723cb220f" | |
}, | |
"truncated": 0, | |
"non_truncated": 2891, | |
"padded": 11440, | |
"non_padded": 124, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_mathematics|0": { | |
"hashes": { | |
"hash_examples": "004f9c0a40b5ec10", | |
"hash_full_prompts": "0e68a48dcfc6de0a", | |
"hash_input_tokens": "00fa31cb1ed8919d", | |
"hash_cont_tokens": "8130a825e5a2ee3d" | |
}, | |
"truncated": 0, | |
"non_truncated": 270, | |
"padded": 1072, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_microeconomics|0": { | |
"hashes": { | |
"hash_examples": "80cf03d462e6ccbc", | |
"hash_full_prompts": "06e3f3da20b0f1e2", | |
"hash_input_tokens": "5353a76509c3d138", | |
"hash_cont_tokens": "4f6974070ef28d29" | |
}, | |
"truncated": 0, | |
"non_truncated": 238, | |
"padded": 948, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_physics|0": { | |
"hashes": { | |
"hash_examples": "92218def5b383845", | |
"hash_full_prompts": "f46eaa1db1b7a563", | |
"hash_input_tokens": "29a82d7f1dda5fef", | |
"hash_cont_tokens": "5d32bcd7ba8252ba" | |
}, | |
"truncated": 0, | |
"non_truncated": 151, | |
"padded": 604, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_psychology|0": { | |
"hashes": { | |
"hash_examples": "323f7848fee32e58", | |
"hash_full_prompts": "48630ebba7eb5b16", | |
"hash_input_tokens": "a0200531eef928fe", | |
"hash_cont_tokens": "1512a6938229952b" | |
}, | |
"truncated": 0, | |
"non_truncated": 545, | |
"padded": 2156, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_statistics|0": { | |
"hashes": { | |
"hash_examples": "d7bbe0d037cf31ec", | |
"hash_full_prompts": "a9eb3b25052517e4", | |
"hash_input_tokens": "cd7137e12fb50620", | |
"hash_cont_tokens": "95cb29e5c31221c8" | |
}, | |
"truncated": 0, | |
"non_truncated": 4232, | |
"padded": 16776, | |
"non_padded": 152, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_us_history|0": { | |
"hashes": { | |
"hash_examples": "722ec9207e3b0e04", | |
"hash_full_prompts": "2b5468ea8f17bf1c", | |
"hash_input_tokens": "fc2b6c2151889f0a", | |
"hash_cont_tokens": "0c31c2de1e3429bf" | |
}, | |
"truncated": 0, | |
"non_truncated": 204, | |
"padded": 816, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:high_school_world_history|0": { | |
"hashes": { | |
"hash_examples": "b5eb675d3b578584", | |
"hash_full_prompts": "e86385cb9247dbab", | |
"hash_input_tokens": "fed212747a11803a", | |
"hash_cont_tokens": "5e704d9d54138833" | |
}, | |
"truncated": 0, | |
"non_truncated": 237, | |
"padded": 948, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:human_aging|0": { | |
"hashes": { | |
"hash_examples": "713ac79cd2dd2d7b", | |
"hash_full_prompts": "6ab00057968ae8ca", | |
"hash_input_tokens": "52096bb95de03d04", | |
"hash_cont_tokens": "e5a3e63957647f04" | |
}, | |
"truncated": 0, | |
"non_truncated": 223, | |
"padded": 868, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:human_sexuality|0": { | |
"hashes": { | |
"hash_examples": "47551ab4e5dcf6c5", | |
"hash_full_prompts": "805fe5054a40800c", | |
"hash_input_tokens": "25de1efb68f0a820", | |
"hash_cont_tokens": "90a9b6d1231332f4" | |
}, | |
"truncated": 0, | |
"non_truncated": 131, | |
"padded": 512, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:international_law|0": { | |
"hashes": { | |
"hash_examples": "da360336943398d5", | |
"hash_full_prompts": "8ea1a622c6da1114", | |
"hash_input_tokens": "9d124d6c08622e73", | |
"hash_cont_tokens": "9ab33ab519d55748" | |
}, | |
"truncated": 0, | |
"non_truncated": 121, | |
"padded": 484, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:jurisprudence|0": { | |
"hashes": { | |
"hash_examples": "661d161a486fb035", | |
"hash_full_prompts": "423b5a498899e23f", | |
"hash_input_tokens": "a0ba5d868ff572e8", | |
"hash_cont_tokens": "2cecb6db3790a23b" | |
}, | |
"truncated": 0, | |
"non_truncated": 108, | |
"padded": 432, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:logical_fallacies|0": { | |
"hashes": { | |
"hash_examples": "5c3926384758bda7", | |
"hash_full_prompts": "33fe0f03191bd3e5", | |
"hash_input_tokens": "647ce0ecc284c1b4", | |
"hash_cont_tokens": "f5c60e363dd9fc3d" | |
}, | |
"truncated": 0, | |
"non_truncated": 163, | |
"padded": 640, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:machine_learning|0": { | |
"hashes": { | |
"hash_examples": "3ce756e6a22ffc48", | |
"hash_full_prompts": "d5b7664a63472867", | |
"hash_input_tokens": "cd6d7847d855fb5e", | |
"hash_cont_tokens": "d41e7e44237c0a16" | |
}, | |
"truncated": 0, | |
"non_truncated": 112, | |
"padded": 444, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:management|0": { | |
"hashes": { | |
"hash_examples": "20fe769bb3276832", | |
"hash_full_prompts": "ac1c2a4056812fc0", | |
"hash_input_tokens": "e505f3fa23d81970", | |
"hash_cont_tokens": "372864196dbb4cad" | |
}, | |
"truncated": 0, | |
"non_truncated": 103, | |
"padded": 396, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:marketing|0": { | |
"hashes": { | |
"hash_examples": "6b19449559d987ce", | |
"hash_full_prompts": "ec5aa6784dc98d57", | |
"hash_input_tokens": "077ac642f6789bae", | |
"hash_cont_tokens": "ad74b6b4e88f6100" | |
}, | |
"truncated": 0, | |
"non_truncated": 234, | |
"padded": 932, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:medical_genetics|0": { | |
"hashes": { | |
"hash_examples": "cbb0fa9df0f5435a", | |
"hash_full_prompts": "86ae86d774528961", | |
"hash_input_tokens": "3484a4c0ef9421bc", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 384, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:miscellaneous|0": { | |
"hashes": { | |
"hash_examples": "0a4134046c23cff9", | |
"hash_full_prompts": "ceb61e0e26580817", | |
"hash_input_tokens": "c8e9d3e7e41de745", | |
"hash_cont_tokens": "f2831dc319b7001c" | |
}, | |
"truncated": 0, | |
"non_truncated": 2420, | |
"padded": 9580, | |
"non_padded": 100, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_disputes|0": { | |
"hashes": { | |
"hash_examples": "1ac8a0967c82caa0", | |
"hash_full_prompts": "4fc612690d53f3ce", | |
"hash_input_tokens": "a4884ff914a4a5b0", | |
"hash_cont_tokens": "d6a32c4f89ec0e43" | |
}, | |
"truncated": 0, | |
"non_truncated": 346, | |
"padded": 1368, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:moral_scenarios|0": { | |
"hashes": { | |
"hash_examples": "2c0670188bc5a789", | |
"hash_full_prompts": "09c18e8a7364c43d", | |
"hash_input_tokens": "04807be188f1dc4a", | |
"hash_cont_tokens": "0abad6841e9b5dc1" | |
}, | |
"truncated": 0, | |
"non_truncated": 895, | |
"padded": 3504, | |
"non_padded": 76, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:nutrition|0": { | |
"hashes": { | |
"hash_examples": "658628c0dcdfe201", | |
"hash_full_prompts": "96d8b1f74ee63bf5", | |
"hash_input_tokens": "6cd78c7f519986ef", | |
"hash_cont_tokens": "1947ff415070dfa5" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1208, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:philosophy|0": { | |
"hashes": { | |
"hash_examples": "8b6707b322affafd", | |
"hash_full_prompts": "fa89fc8b0359514c", | |
"hash_input_tokens": "8603bb877ad4e485", | |
"hash_cont_tokens": "566ed263a8423f58" | |
}, | |
"truncated": 0, | |
"non_truncated": 311, | |
"padded": 1224, | |
"non_padded": 20, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:prehistory|0": { | |
"hashes": { | |
"hash_examples": "0c85ffcdc9a7b367", | |
"hash_full_prompts": "3ff1fb6b58ac4484", | |
"hash_input_tokens": "a8c6b538723c4fa0", | |
"hash_cont_tokens": "69725bb3099f23d0" | |
}, | |
"truncated": 0, | |
"non_truncated": 324, | |
"padded": 1272, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_accounting|0": { | |
"hashes": { | |
"hash_examples": "cce1ea2d5f544b2f", | |
"hash_full_prompts": "f16cd0f5e4fe2c66", | |
"hash_input_tokens": "fcfa4165705a9099", | |
"hash_cont_tokens": "25802ac32c51a7f7" | |
}, | |
"truncated": 0, | |
"non_truncated": 4514, | |
"padded": 17819, | |
"non_padded": 237, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_law|0": { | |
"hashes": { | |
"hash_examples": "1c654b024b54eb4b", | |
"hash_full_prompts": "971306cbbcf409ec", | |
"hash_input_tokens": "d507337bd69d109b", | |
"hash_cont_tokens": "7f2b1b7218a1ef40" | |
}, | |
"truncated": 0, | |
"non_truncated": 7987, | |
"padded": 31596, | |
"non_padded": 352, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_medicine|0": { | |
"hashes": { | |
"hash_examples": "c621eaacfa662ebc", | |
"hash_full_prompts": "e8723a21cb3e1733", | |
"hash_input_tokens": "46f65a5d9fdcfb57", | |
"hash_cont_tokens": "e1afe1503a5d02c5" | |
}, | |
"truncated": 0, | |
"non_truncated": 1637, | |
"padded": 6476, | |
"non_padded": 72, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:professional_psychology|0": { | |
"hashes": { | |
"hash_examples": "bc14a28eaec87dc4", | |
"hash_full_prompts": "0a47d09f99140517", | |
"hash_input_tokens": "d1c485cd8a6fbd54", | |
"hash_cont_tokens": "1c41f3eeadeec685" | |
}, | |
"truncated": 0, | |
"non_truncated": 3503, | |
"padded": 13760, | |
"non_padded": 252, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:public_relations|0": { | |
"hashes": { | |
"hash_examples": "de4989d9375885c4", | |
"hash_full_prompts": "35e624c8a427dbbf", | |
"hash_input_tokens": "939017fb3c8a30c6", | |
"hash_cont_tokens": "3914ab4a5d5b69e8" | |
}, | |
"truncated": 0, | |
"non_truncated": 110, | |
"padded": 432, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:security_studies|0": { | |
"hashes": { | |
"hash_examples": "3f84bfeec717c6de", | |
"hash_full_prompts": "da66f0eabda53a55", | |
"hash_input_tokens": "30f13bf690e0d26d", | |
"hash_cont_tokens": "1d2b199df736dea4" | |
}, | |
"truncated": 0, | |
"non_truncated": 245, | |
"padded": 972, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:sociology|0": { | |
"hashes": { | |
"hash_examples": "10d7c2fae10bfcbc", | |
"hash_full_prompts": "bf9b7f3f63fd36fd", | |
"hash_input_tokens": "9bb9004110d98127", | |
"hash_cont_tokens": "3556cb090eda6dec" | |
}, | |
"truncated": 0, | |
"non_truncated": 201, | |
"padded": 788, | |
"non_padded": 16, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:us_foreign_policy|0": { | |
"hashes": { | |
"hash_examples": "bb05f02c38ddaf1a", | |
"hash_full_prompts": "3226af39419ff020", | |
"hash_input_tokens": "012645cef04dca5c", | |
"hash_cont_tokens": "d35519013f781909" | |
}, | |
"truncated": 0, | |
"non_truncated": 100, | |
"padded": 376, | |
"non_padded": 24, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:virology|0": { | |
"hashes": { | |
"hash_examples": "290915a48884ede2", | |
"hash_full_prompts": "29bcda1d1d2c044c", | |
"hash_input_tokens": "7d5606563de661e0", | |
"hash_cont_tokens": "cbf93f8f3bd5c82c" | |
}, | |
"truncated": 0, | |
"non_truncated": 166, | |
"padded": 636, | |
"non_padded": 28, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|arabic_mmlu_ht:world_religions|0": { | |
"hashes": { | |
"hash_examples": "91cc5451c7284f75", | |
"hash_full_prompts": "c72573608e762b7b", | |
"hash_input_tokens": "85217a1891156ff7", | |
"hash_cont_tokens": "b5fbc024ac54a858" | |
}, | |
"truncated": 0, | |
"non_truncated": 171, | |
"padded": 672, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|madinah_qa:Arabic Language (General)|0": { | |
"hashes": { | |
"hash_examples": "25bf94d05f737b63", | |
"hash_full_prompts": "58373c1d046fcae1", | |
"hash_input_tokens": "a127d802e1b17613", | |
"hash_cont_tokens": "95a234c727b7b43c" | |
}, | |
"truncated": 0, | |
"non_truncated": 612, | |
"padded": 2399, | |
"non_padded": 4, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|madinah_qa:Arabic Language (Grammar)|0": { | |
"hashes": { | |
"hash_examples": "e65fe4df843f4380", | |
"hash_full_prompts": "96cd593b0cba9400", | |
"hash_input_tokens": "cc0d88c66fc53f5f", | |
"hash_cont_tokens": "98f874e7446c544b" | |
}, | |
"truncated": 0, | |
"non_truncated": 365, | |
"padded": 1561, | |
"non_padded": 27, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Ethics|0": { | |
"hashes": { | |
"hash_examples": "5d32da36271c5eb4", | |
"hash_full_prompts": "80ae56261ebe1cd9", | |
"hash_input_tokens": "ad7591340704b393", | |
"hash_cont_tokens": "67fe5dc315ef723c" | |
}, | |
"truncated": 0, | |
"non_truncated": 60, | |
"padded": 180, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Illegal|0": { | |
"hashes": { | |
"hash_examples": "0c07f1f100f2d0e8", | |
"hash_full_prompts": "96bfa7a00fe0a317", | |
"hash_input_tokens": "2779e472ad3d99f2", | |
"hash_cont_tokens": "2cc82a58b4d87abc" | |
}, | |
"truncated": 0, | |
"non_truncated": 53, | |
"padded": 159, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:MentalHealth|0": { | |
"hashes": { | |
"hash_examples": "8e5fc5c4704bd96b", | |
"hash_full_prompts": "e982a877409bb812", | |
"hash_input_tokens": "2ab1e4f3157d2378", | |
"hash_cont_tokens": "7b399d0f0a9124f1" | |
}, | |
"truncated": 0, | |
"non_truncated": 76, | |
"padded": 228, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Offensive|0": { | |
"hashes": { | |
"hash_examples": "5ad4369b7dc5de46", | |
"hash_full_prompts": "7429e2df2148ac0b", | |
"hash_input_tokens": "4127f620c31660fb", | |
"hash_cont_tokens": "0cd5015bc3370adf" | |
}, | |
"truncated": 0, | |
"non_truncated": 69, | |
"padded": 207, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:PhysicalHealth|0": { | |
"hashes": { | |
"hash_examples": "dc2a632e2dcc86db", | |
"hash_full_prompts": "7c3a9369922eaf07", | |
"hash_input_tokens": "7d5e958fab02afcc", | |
"hash_cont_tokens": "cb8655dcad91858d" | |
}, | |
"truncated": 0, | |
"non_truncated": 73, | |
"padded": 210, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Privacy|0": { | |
"hashes": { | |
"hash_examples": "295e35448a39e003", | |
"hash_full_prompts": "ed3835c5726716c3", | |
"hash_input_tokens": "1552aee591bc6625", | |
"hash_cont_tokens": "7f23416c661e2ee5" | |
}, | |
"truncated": 0, | |
"non_truncated": 57, | |
"padded": 162, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Trustfulness|0": { | |
"hashes": { | |
"hash_examples": "e79ac1ea5439e623", | |
"hash_full_prompts": "2d0c1d4caad65add", | |
"hash_input_tokens": "baccbd4e2df8e550", | |
"hash_cont_tokens": "ff874dba360c1ede" | |
}, | |
"truncated": 0, | |
"non_truncated": 78, | |
"padded": 228, | |
"non_padded": 6, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|aratrust:Unfairness|0": { | |
"hashes": { | |
"hash_examples": "4ac5dccbfbdc5077", | |
"hash_full_prompts": "bb6e622d00f3f338", | |
"hash_input_tokens": "9381677b1c17cab4", | |
"hash_cont_tokens": "3e990fe3a474dbc5" | |
}, | |
"truncated": 0, | |
"non_truncated": 55, | |
"padded": 159, | |
"non_padded": 6, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"community|alrage_qa|0": { | |
"hashes": { | |
"hash_examples": "3edbbe22cabd4160", | |
"hash_full_prompts": "8c017c75b4622fb5", | |
"hash_input_tokens": "dbccd73d9313a2b1", | |
"hash_cont_tokens": "f6fe9ce98090c8c5" | |
}, | |
"truncated": 2106, | |
"non_truncated": 0, | |
"padded": 2106, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "b8b3b49631adcc40", | |
"hash_full_prompts": "901791b0d4b8c302", | |
"hash_input_tokens": "9aa94a9e7786c225", | |
"hash_cont_tokens": "a7359cec138d52a8" | |
}, | |
"truncated": 521, | |
"non_truncated": 91341, | |
"padded": 333440, | |
"non_padded": 3444, | |
"num_truncated_few_shots": 0 | |
} | |
} |