{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": 0,
    "start_time": 737.797443611,
    "end_time": 84844.217831501,
    "total_evaluation_time_secondes": "84106.42038789",
    "model_name": "Qwen/Qwen2.5-72B",
    "model_sha": "efba10c8e54e91e0d9570ab5f7b51a958474d4cb",
    "model_dtype": "torch.bfloat16",
    "model_size": "135.43 GB"
  },
  "results": {
    "community|alghafa:mcq_exams_test_ar|0": {
      "acc_norm": 0.4021543985637343,
      "acc_norm_stderr": 0.02079474192842359
    },
    "community|alghafa:meta_ar_dialects|0": {
      "acc_norm": 0.5486561631139945,
      "acc_norm_stderr": 0.00677561027945605
    },
    "community|alghafa:meta_ar_msa|0": {
      "acc_norm": 0.5810055865921788,
      "acc_norm_stderr": 0.016501579306861677
    },
    "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": {
      "acc_norm": 0.52,
      "acc_norm_stderr": 0.05807730170189531
    },
    "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": {
      "acc_norm": 0.7666666666666667,
      "acc_norm_stderr": 0.03464962582372409
    },
    "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": {
      "acc_norm": 0.6666666666666666,
      "acc_norm_stderr": 0.038618963090899454
    },
    "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": {
      "acc_norm": 0.8212632895559725,
      "acc_norm_stderr": 0.004285149849174572
    },
    "community|alghafa:multiple_choice_rating_sentiment_task|0": {
      "acc_norm": 0.5751459549624687,
      "acc_norm_stderr": 0.006384847729010638
    },
    "community|alghafa:multiple_choice_sentiment_task|0": {
      "acc_norm": 0.41627906976744183,
      "acc_norm_stderr": 0.011889319071906443
    },
    "community|arabic_exams|0": {
      "acc_norm": 0.46368715083798884,
      "acc_norm_stderr": 0.021539679498168932
    },
    "community|arabic_mmlu:Accounting (University)|0": {
      "acc_norm": 0.6081081081081081,
      "acc_norm_stderr": 0.05713629906375233
    },
    "community|arabic_mmlu:Arabic Language (General)|0": {
      "acc_norm": 0.5637254901960784,
      "acc_norm_stderr": 0.020062874243539135
    },
    "community|arabic_mmlu:Arabic Language (Grammar)|0": {
      "acc_norm": 0.4328767123287671,
      "acc_norm_stderr": 0.025969892979221344
    },
    "community|arabic_mmlu:Arabic Language (High School)|0": {
      "acc_norm": 0.3435897435897436,
      "acc_norm_stderr": 0.024078696580635477
    },
    "community|arabic_mmlu:Arabic Language (Middle School)|0": {
      "acc_norm": 0.6666666666666666,
      "acc_norm_stderr": 0.09245003270420485
    },
    "community|arabic_mmlu:Arabic Language (Primary School)|0": {
      "acc_norm": 0.6468253968253969,
      "acc_norm_stderr": 0.03016833930798728
    },
    "community|arabic_mmlu:Biology (High School)|0": {
      "acc_norm": 0.5102909865152591,
      "acc_norm_stderr": 0.013322222111426639
    },
    "community|arabic_mmlu:Civics (High School)|0": {
      "acc_norm": 0.5057471264367817,
      "acc_norm_stderr": 0.053912824825556656
    },
    "community|arabic_mmlu:Civics (Middle School)|0": {
      "acc_norm": 0.4872881355932203,
      "acc_norm_stderr": 0.03260586088180841
    },
    "community|arabic_mmlu:Computer Science (High School)|0": {
      "acc_norm": 0.685823754789272,
      "acc_norm_stderr": 0.028787653079590046
    },
    "community|arabic_mmlu:Computer Science (Middle School)|0": {
      "acc_norm": 0.7407407407407407,
      "acc_norm_stderr": 0.08594360757264022
    },
    "community|arabic_mmlu:Computer Science (Primary School)|0": {
      "acc_norm": 0.7947368421052632,
      "acc_norm_stderr": 0.029378950271893606
    },
    "community|arabic_mmlu:Computer Science (University)|0": {
      "acc_norm": 0.65625,
      "acc_norm_stderr": 0.05983919423477113
    },
    "community|arabic_mmlu:Driving Test|0": {
      "acc_norm": 0.7200660611065235,
      "acc_norm_stderr": 0.01290687973217031
    },
    "community|arabic_mmlu:Economics (High School)|0": {
      "acc_norm": 0.6694444444444444,
      "acc_norm_stderr": 0.024827459203325592
    },
    "community|arabic_mmlu:Economics (Middle School)|0": {
      "acc_norm": 0.7701149425287356,
      "acc_norm_stderr": 0.04537158185250774
    },
    "community|arabic_mmlu:Economics (University)|0": {
      "acc_norm": 0.5985401459854015,
      "acc_norm_stderr": 0.04203376090830894
    },
    "community|arabic_mmlu:General Knowledge|0": {
      "acc_norm": 0.6365740740740741,
      "acc_norm_stderr": 0.016372953501505756
    },
    "community|arabic_mmlu:General Knowledge (Middle School)|0": {
      "acc_norm": 0.7558139534883721,
      "acc_norm_stderr": 0.03285260554707746
    },
    "community|arabic_mmlu:General Knowledge (Primary School)|0": {
      "acc_norm": 0.691358024691358,
      "acc_norm_stderr": 0.03640542127281407
    },
    "community|arabic_mmlu:Geography (High School)|0": {
      "acc_norm": 0.594412331406551,
      "acc_norm_stderr": 0.015247438777552136
    },
    "community|arabic_mmlu:Geography (Middle School)|0": {
      "acc_norm": 0.6875,
      "acc_norm_stderr": 0.02815637344037142
    },
    "community|arabic_mmlu:Geography (Primary School)|0": {
      "acc_norm": 0.7017543859649122,
      "acc_norm_stderr": 0.06113439056466399
    },
    "community|arabic_mmlu:History (High School)|0": {
      "acc_norm": 0.4855263157894737,
      "acc_norm_stderr": 0.018141244706646364
    },
    "community|arabic_mmlu:History (Middle School)|0": {
      "acc_norm": 0.6699507389162561,
      "acc_norm_stderr": 0.033085304262282574
    },
    "community|arabic_mmlu:History (Primary School)|0": {
      "acc_norm": 0.7058823529411765,
      "acc_norm_stderr": 0.04533838195929776
    },
    "community|arabic_mmlu:Islamic Studies|0": {
      "acc_norm": 0.5477308294209703,
      "acc_norm_stderr": 0.01970478706070722
    },
    "community|arabic_mmlu:Islamic Studies (High School)|0": {
      "acc_norm": 0.7155688622754491,
      "acc_norm_stderr": 0.024722487703506966
    },
    "community|arabic_mmlu:Islamic Studies (Middle School)|0": {
      "acc_norm": 0.680672268907563,
      "acc_norm_stderr": 0.030283995525884396
    },
    "community|arabic_mmlu:Islamic Studies (Primary School)|0": {
      "acc_norm": 0.7947947947947948,
      "acc_norm_stderr": 0.012783695659367022
    },
    "community|arabic_mmlu:Law (Professional)|0": {
      "acc_norm": 0.6528662420382165,
      "acc_norm_stderr": 0.02690843021726741
    },
    "community|arabic_mmlu:Management (University)|0": {
      "acc_norm": 0.7866666666666666,
      "acc_norm_stderr": 0.047622136321965505
    },
    "community|arabic_mmlu:Math (Primary School)|0": {
      "acc_norm": 0.7237163814180929,
      "acc_norm_stderr": 0.02213765870754959
    },
    "community|arabic_mmlu:Natural Science (Middle School)|0": {
      "acc_norm": 0.743801652892562,
      "acc_norm_stderr": 0.028119539879165812
    },
    "community|arabic_mmlu:Natural Science (Primary School)|0": {
      "acc_norm": 0.8541666666666666,
      "acc_norm_stderr": 0.019283120158610514
    },
    "community|arabic_mmlu:Philosophy (High School)|0": {
      "acc_norm": 0.5897435897435898,
      "acc_norm_stderr": 0.07979349797082042
    },
    "community|arabic_mmlu:Physics (High School)|0": {
      "acc_norm": 0.5176470588235295,
      "acc_norm_stderr": 0.03135324402176752
    },
    "community|arabic_mmlu:Political Science (University)|0": {
      "acc_norm": 0.6333333333333333,
      "acc_norm_stderr": 0.03333333333333339
    },
    "community|arabic_mmlu:Social Science (Middle School)|0": {
      "acc_norm": 0.49377593360995853,
      "acc_norm_stderr": 0.03227236052966302
    },
    "community|arabic_mmlu:Social Science (Primary School)|0": {
      "acc_norm": 0.7943262411347518,
      "acc_norm_stderr": 0.015233600710136162
    },
    "community|arabic_mmlu_ht:abstract_algebra|0": {
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.049756985195624284
    },
    "community|arabic_mmlu_ht:anatomy|0": {
      "acc_norm": 0.5111111111111111,
      "acc_norm_stderr": 0.04318275491977978
    },
    "community|arabic_mmlu_ht:astronomy|0": {
      "acc_norm": 0.8092105263157895,
      "acc_norm_stderr": 0.031975658210325
    },
    "community|arabic_mmlu_ht:business_ethics|0": {
      "acc_norm": 0.72,
      "acc_norm_stderr": 0.04512608598542127
    },
    "community|arabic_mmlu_ht:clinical_knowledge|0": {
      "acc_norm": 0.6641509433962264,
      "acc_norm_stderr": 0.029067220146644826
    },
    "community|arabic_mmlu_ht:college_biology|0": {
      "acc_norm": 0.8125,
      "acc_norm_stderr": 0.032639560491693344
    },
    "community|arabic_mmlu_ht:college_chemistry|0": {
      "acc_norm": 0.43,
      "acc_norm_stderr": 0.04975698519562428
    },
    "community|arabic_mmlu_ht:college_computer_science|0": {
      "acc_norm": 0.53,
      "acc_norm_stderr": 0.050161355804659205
    },
    "community|arabic_mmlu_ht:college_mathematics|0": {
      "acc_norm": 0.49,
      "acc_norm_stderr": 0.05024183937956911
    },
    "community|arabic_mmlu_ht:college_medicine|0": {
      "acc_norm": 0.630057803468208,
      "acc_norm_stderr": 0.036812296333943194
    },
    "community|arabic_mmlu_ht:college_physics|0": {
      "acc_norm": 0.46078431372549017,
      "acc_norm_stderr": 0.049598599663841815
    },
    "community|arabic_mmlu_ht:computer_security|0": {
      "acc_norm": 0.73,
      "acc_norm_stderr": 0.044619604333847394
    },
    "community|arabic_mmlu_ht:conceptual_physics|0": {
      "acc_norm": 0.7404255319148936,
      "acc_norm_stderr": 0.028659179374292326
    },
    "community|arabic_mmlu_ht:econometrics|0": {
      "acc_norm": 0.5087719298245614,
      "acc_norm_stderr": 0.047028804320496165
    },
    "community|arabic_mmlu_ht:electrical_engineering|0": {
      "acc_norm": 0.5448275862068965,
      "acc_norm_stderr": 0.04149886942192118
    },
    "community|arabic_mmlu_ht:elementary_mathematics|0": {
      "acc_norm": 0.5925925925925926,
      "acc_norm_stderr": 0.02530590624159063
    },
    "community|arabic_mmlu_ht:formal_logic|0": {
      "acc_norm": 0.47619047619047616,
      "acc_norm_stderr": 0.04467062628403273
    },
    "community|arabic_mmlu_ht:global_facts|0": {
      "acc_norm": 0.51,
      "acc_norm_stderr": 0.050241839379569095
    },
    "community|arabic_mmlu_ht:high_school_biology|0": {
      "acc_norm": 0.7498033044846577,
      "acc_norm_stderr": 0.007015166994542313
    },
    "community|arabic_mmlu_ht:high_school_chemistry|0": {
      "acc_norm": 0.7420318725099602,
      "acc_norm_stderr": 0.006904806791953254
    },
    "community|arabic_mmlu_ht:high_school_computer_science|0": {
      "acc_norm": 0.84,
      "acc_norm_stderr": 0.03684529491774709
    },
    "community|arabic_mmlu_ht:high_school_european_history|0": {
      "acc_norm": 0.6399656526005888,
      "acc_norm_stderr": 0.005316733030309324
    },
    "community|arabic_mmlu_ht:high_school_geography|0": {
      "acc_norm": 0.7878787878787878,
      "acc_norm_stderr": 0.029126522834586825
    },
    "community|arabic_mmlu_ht:high_school_government_and_politics|0": {
      "acc_norm": 0.8497409326424871,
      "acc_norm_stderr": 0.025787723180723872
    },
    "community|arabic_mmlu_ht:high_school_macroeconomics|0": {
      "acc_norm": 0.7637495676236596,
      "acc_norm_stderr": 0.007901560565461969
    },
    "community|arabic_mmlu_ht:high_school_mathematics|0": {
      "acc_norm": 0.337037037037037,
      "acc_norm_stderr": 0.028820884666253252
    },
    "community|arabic_mmlu_ht:high_school_microeconomics|0": {
      "acc_norm": 0.7436974789915967,
      "acc_norm_stderr": 0.02835962087053395
    },
    "community|arabic_mmlu_ht:high_school_physics|0": {
      "acc_norm": 0.5364238410596026,
      "acc_norm_stderr": 0.04071636065944216
    },
    "community|arabic_mmlu_ht:high_school_psychology|0": {
      "acc_norm": 0.7944954128440367,
      "acc_norm_stderr": 0.017324352325016005
    },
    "community|arabic_mmlu_ht:high_school_statistics|0": {
      "acc_norm": 0.7339319470699432,
      "acc_norm_stderr": 0.006793643436795497
    },
    "community|arabic_mmlu_ht:high_school_us_history|0": {
      "acc_norm": 0.8480392156862745,
      "acc_norm_stderr": 0.025195658428931792
    },
    "community|arabic_mmlu_ht:high_school_world_history|0": {
      "acc_norm": 0.8396624472573839,
      "acc_norm_stderr": 0.02388438092596567
    },
    "community|arabic_mmlu_ht:human_aging|0": {
      "acc_norm": 0.695067264573991,
      "acc_norm_stderr": 0.030898610882477515
    },
    "community|arabic_mmlu_ht:human_sexuality|0": {
      "acc_norm": 0.7175572519083969,
      "acc_norm_stderr": 0.03948406125768361
    },
    "community|arabic_mmlu_ht:international_law|0": {
      "acc_norm": 0.7603305785123967,
      "acc_norm_stderr": 0.03896878985070416
    },
    "community|arabic_mmlu_ht:jurisprudence|0": {
      "acc_norm": 0.7407407407407407,
      "acc_norm_stderr": 0.042365112580946315
    },
    "community|arabic_mmlu_ht:logical_fallacies|0": {
      "acc_norm": 0.6871165644171779,
      "acc_norm_stderr": 0.036429145782924055
    },
    "community|arabic_mmlu_ht:machine_learning|0": {
      "acc_norm": 0.5357142857142857,
      "acc_norm_stderr": 0.04733667890053756
    },
    "community|arabic_mmlu_ht:management|0": {
      "acc_norm": 0.6796116504854369,
      "acc_norm_stderr": 0.04620284082280041
    },
    "community|arabic_mmlu_ht:marketing|0": {
      "acc_norm": 0.8333333333333334,
      "acc_norm_stderr": 0.024414947304543678
    },
    "community|arabic_mmlu_ht:medical_genetics|0": {
      "acc_norm": 0.67,
      "acc_norm_stderr": 0.04725815626252607
    },
    "community|arabic_mmlu_ht:miscellaneous|0": {
      "acc_norm": 0.7830578512396694,
      "acc_norm_stderr": 0.0083801339141947
    },
    "community|arabic_mmlu_ht:moral_disputes|0": {
      "acc_norm": 0.6358381502890174,
      "acc_norm_stderr": 0.025906632631016127
    },
    "community|arabic_mmlu_ht:moral_scenarios|0": {
      "acc_norm": 0.41675977653631285,
      "acc_norm_stderr": 0.01648913496243895
    },
    "community|arabic_mmlu_ht:nutrition|0": {
      "acc_norm": 0.6797385620915033,
      "acc_norm_stderr": 0.026716118380156847
    },
    "community|arabic_mmlu_ht:philosophy|0": {
      "acc_norm": 0.6591639871382636,
      "acc_norm_stderr": 0.026920841260776162
    },
    "community|arabic_mmlu_ht:prehistory|0": {
      "acc_norm": 0.7253086419753086,
      "acc_norm_stderr": 0.024836057868294677
    },
    "community|arabic_mmlu_ht:professional_accounting|0": {
      "acc_norm": 0.7157731501993797,
      "acc_norm_stderr": 0.006714096604099379
    },
    "community|arabic_mmlu_ht:professional_law|0": {
      "acc_norm": 0.6372855890822586,
      "acc_norm_stderr": 0.005380031288753342
    },
    "community|arabic_mmlu_ht:professional_medicine|0": {
      "acc_norm": 0.7758094074526573,
      "acc_norm_stderr": 0.010310852246980131
    },
    "community|arabic_mmlu_ht:professional_psychology|0": {
      "acc_norm": 0.7479303454182129,
      "acc_norm_stderr": 0.007337241085377199
    },
    "community|arabic_mmlu_ht:public_relations|0": {
      "acc_norm": 0.5818181818181818,
      "acc_norm_stderr": 0.04724577405731572
    },
    "community|arabic_mmlu_ht:security_studies|0": {
      "acc_norm": 0.6,
      "acc_norm_stderr": 0.031362502409358936
    },
    "community|arabic_mmlu_ht:sociology|0": {
      "acc_norm": 0.746268656716418,
      "acc_norm_stderr": 0.03076944496729602
    },
    "community|arabic_mmlu_ht:us_foreign_policy|0": {
      "acc_norm": 0.77,
      "acc_norm_stderr": 0.04229525846816505
    },
    "community|arabic_mmlu_ht:virology|0": {
      "acc_norm": 0.5,
      "acc_norm_stderr": 0.03892494720807614
    },
    "community|arabic_mmlu_ht:world_religions|0": {
      "acc_norm": 0.7777777777777778,
      "acc_norm_stderr": 0.03188578017686398
    },
    "community|madinah_qa:Arabic Language (General)|0": {
      "acc_norm": 0.5996732026143791,
      "acc_norm_stderr": 0.019821843688271768
    },
    "community|madinah_qa:Arabic Language (Grammar)|0": {
      "acc_norm": 0.4794520547945205,
      "acc_norm_stderr": 0.026184981327269003
    },
    "community|aratrust:Ethics|0": {
      "acc_norm": 0.8,
      "acc_norm_stderr": 0.05207556439232953
    },
    "community|aratrust:Illegal|0": {
      "acc_norm": 0.9245283018867925,
      "acc_norm_stderr": 0.03663114503374153
    },
    "community|aratrust:MentalHealth|0": {
      "acc_norm": 0.9473684210526315,
      "acc_norm_stderr": 0.025784102555612406
    },
    "community|aratrust:Offensive|0": {
      "acc_norm": 0.7971014492753623,
      "acc_norm_stderr": 0.048768771474726615
    },
    "community|aratrust:PhysicalHealth|0": {
      "acc_norm": 0.9041095890410958,
      "acc_norm_stderr": 0.034700192131439864
    },
    "community|aratrust:Privacy|0": {
      "acc_norm": 0.9649122807017544,
      "acc_norm_stderr": 0.024588241692166144
    },
    "community|aratrust:Trustfulness|0": {
      "acc_norm": 0.782051282051282,
      "acc_norm_stderr": 0.04704889337109349
    },
    "community|aratrust:Unfairness|0": {
      "acc_norm": 0.9636363636363636,
      "acc_norm_stderr": 0.025473778207916145
    },
    "community|alghafa:_average|0": {
      "acc_norm": 0.5886486439876805,
      "acc_norm_stderr": 0.021997459864594646
    },
    "community|arabic_mmlu:_average|0": {
      "acc_norm": 0.6464604499239681,
      "acc_norm_stderr": 0.0347270532846324
    },
    "community|arabic_mmlu_ht:_average|0": {
      "acc_norm": 0.664369860699175,
      "acc_norm_stderr": 0.031143334587463947
    },
    "community|madinah_qa:_average|0": {
      "acc_norm": 0.5395626287044498,
      "acc_norm_stderr": 0.023003412507770386
    },
    "community|aratrust:_average|0": {
      "acc_norm": 0.8854634609556603,
      "acc_norm_stderr": 0.036883836107378214
    },
    "all": {
      "acc_norm": 0.6636910935777179,
      "acc_norm_stderr": 0.03183629517115239,
      "llm_as_judge": 0.7737416904083511,
      "llm_as_judge_stderr": 0.00011860756369976968
    },
    "community|alrage_qa|0": {
      "llm_as_judge": 0.7737416904083511,
      "llm_as_judge_stderr": 0.00011860756369976968
    }
  },
  "versions": {
    "community|alghafa:mcq_exams_test_ar|0": 0,
    "community|alghafa:meta_ar_dialects|0": 0,
    "community|alghafa:meta_ar_msa|0": 0,
    "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": 0,
    "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": 0,
    "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": 0,
    "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": 0,
    "community|alghafa:multiple_choice_rating_sentiment_task|0": 0,
    "community|alghafa:multiple_choice_sentiment_task|0": 0,
    "community|arabic_exams|0": 0,
    "community|arabic_mmlu:Accounting (University)|0": 0,
    "community|arabic_mmlu:Arabic Language (General)|0": 0,
    "community|arabic_mmlu:Arabic Language (Grammar)|0": 0,
    "community|arabic_mmlu:Arabic Language (High School)|0": 0,
    "community|arabic_mmlu:Arabic Language (Middle School)|0": 0,
    "community|arabic_mmlu:Arabic Language (Primary School)|0": 0,
    "community|arabic_mmlu:Biology (High School)|0": 0,
    "community|arabic_mmlu:Civics (High School)|0": 0,
    "community|arabic_mmlu:Civics (Middle School)|0": 0,
    "community|arabic_mmlu:Computer Science (High School)|0": 0,
    "community|arabic_mmlu:Computer Science (Middle School)|0": 0,
    "community|arabic_mmlu:Computer Science (Primary School)|0": 0,
    "community|arabic_mmlu:Computer Science (University)|0": 0,
    "community|arabic_mmlu:Driving Test|0": 0,
    "community|arabic_mmlu:Economics (High School)|0": 0,
    "community|arabic_mmlu:Economics (Middle School)|0": 0,
    "community|arabic_mmlu:Economics (University)|0": 0,
    "community|arabic_mmlu:General Knowledge|0": 0,
    "community|arabic_mmlu:General Knowledge (Middle School)|0": 0,
    "community|arabic_mmlu:General Knowledge (Primary School)|0": 0,
    "community|arabic_mmlu:Geography (High School)|0": 0,
    "community|arabic_mmlu:Geography (Middle School)|0": 0,
    "community|arabic_mmlu:Geography (Primary School)|0": 0,
    "community|arabic_mmlu:History (High School)|0": 0,
    "community|arabic_mmlu:History (Middle School)|0": 0,
    "community|arabic_mmlu:History (Primary School)|0": 0,
    "community|arabic_mmlu:Islamic Studies|0": 0,
    "community|arabic_mmlu:Islamic Studies (High School)|0": 0,
    "community|arabic_mmlu:Islamic Studies (Middle School)|0": 0,
    "community|arabic_mmlu:Islamic Studies (Primary School)|0": 0,
    "community|arabic_mmlu:Law (Professional)|0": 0,
    "community|arabic_mmlu:Management (University)|0": 0,
    "community|arabic_mmlu:Math (Primary School)|0": 0,
    "community|arabic_mmlu:Natural Science (Middle School)|0": 0,
    "community|arabic_mmlu:Natural Science (Primary School)|0": 0,
    "community|arabic_mmlu:Philosophy (High School)|0": 0,
    "community|arabic_mmlu:Physics (High School)|0": 0,
    "community|arabic_mmlu:Political Science (University)|0": 0,
    "community|arabic_mmlu:Social Science (Middle School)|0": 0,
    "community|arabic_mmlu:Social Science (Primary School)|0": 0,
    "community|arabic_mmlu_ht:abstract_algebra|0": 0,
    "community|arabic_mmlu_ht:anatomy|0": 0,
    "community|arabic_mmlu_ht:astronomy|0": 0,
    "community|arabic_mmlu_ht:business_ethics|0": 0,
    "community|arabic_mmlu_ht:clinical_knowledge|0": 0,
    "community|arabic_mmlu_ht:college_biology|0": 0,
    "community|arabic_mmlu_ht:college_chemistry|0": 0,
    "community|arabic_mmlu_ht:college_computer_science|0": 0,
    "community|arabic_mmlu_ht:college_mathematics|0": 0,
    "community|arabic_mmlu_ht:college_medicine|0": 0,
    "community|arabic_mmlu_ht:college_physics|0": 0,
    "community|arabic_mmlu_ht:computer_security|0": 0,
    "community|arabic_mmlu_ht:conceptual_physics|0": 0,
    "community|arabic_mmlu_ht:econometrics|0": 0,
    "community|arabic_mmlu_ht:electrical_engineering|0": 0,
    "community|arabic_mmlu_ht:elementary_mathematics|0": 0,
    "community|arabic_mmlu_ht:formal_logic|0": 0,
    "community|arabic_mmlu_ht:global_facts|0": 0,
    "community|arabic_mmlu_ht:high_school_biology|0": 0,
    "community|arabic_mmlu_ht:high_school_chemistry|0": 0,
    "community|arabic_mmlu_ht:high_school_computer_science|0": 0,
    "community|arabic_mmlu_ht:high_school_european_history|0": 0,
    "community|arabic_mmlu_ht:high_school_geography|0": 0,
    "community|arabic_mmlu_ht:high_school_government_and_politics|0": 0,
    "community|arabic_mmlu_ht:high_school_macroeconomics|0": 0,
    "community|arabic_mmlu_ht:high_school_mathematics|0": 0,
    "community|arabic_mmlu_ht:high_school_microeconomics|0": 0,
    "community|arabic_mmlu_ht:high_school_physics|0": 0,
    "community|arabic_mmlu_ht:high_school_psychology|0": 0,
    "community|arabic_mmlu_ht:high_school_statistics|0": 0,
    "community|arabic_mmlu_ht:high_school_us_history|0": 0,
    "community|arabic_mmlu_ht:high_school_world_history|0": 0,
    "community|arabic_mmlu_ht:human_aging|0": 0,
    "community|arabic_mmlu_ht:human_sexuality|0": 0,
    "community|arabic_mmlu_ht:international_law|0": 0,
    "community|arabic_mmlu_ht:jurisprudence|0": 0,
    "community|arabic_mmlu_ht:logical_fallacies|0": 0,
    "community|arabic_mmlu_ht:machine_learning|0": 0,
    "community|arabic_mmlu_ht:management|0": 0,
    "community|arabic_mmlu_ht:marketing|0": 0,
    "community|arabic_mmlu_ht:medical_genetics|0": 0,
    "community|arabic_mmlu_ht:miscellaneous|0": 0,
    "community|arabic_mmlu_ht:moral_disputes|0": 0,
    "community|arabic_mmlu_ht:moral_scenarios|0": 0,
    "community|arabic_mmlu_ht:nutrition|0": 0,
    "community|arabic_mmlu_ht:philosophy|0": 0,
    "community|arabic_mmlu_ht:prehistory|0": 0,
    "community|arabic_mmlu_ht:professional_accounting|0": 0,
    "community|arabic_mmlu_ht:professional_law|0": 0,
    "community|arabic_mmlu_ht:professional_medicine|0": 0,
    "community|arabic_mmlu_ht:professional_psychology|0": 0,
    "community|arabic_mmlu_ht:public_relations|0": 0,
    "community|arabic_mmlu_ht:security_studies|0": 0,
    "community|arabic_mmlu_ht:sociology|0": 0,
    "community|arabic_mmlu_ht:us_foreign_policy|0": 0,
    "community|arabic_mmlu_ht:virology|0": 0,
    "community|arabic_mmlu_ht:world_religions|0": 0,
    "community|aratrust:Ethics|0": 0,
    "community|aratrust:Illegal|0": 0,
    "community|aratrust:MentalHealth|0": 0,
    "community|aratrust:Offensive|0": 0,
    "community|aratrust:PhysicalHealth|0": 0,
    "community|aratrust:Privacy|0": 0,
    "community|aratrust:Trustfulness|0": 0,
    "community|aratrust:Unfairness|0": 0,
    "community|madinah_qa:Arabic Language (General)|0": 0,
    "community|madinah_qa:Arabic Language (Grammar)|0": 0,
    "community|alrage_qa|0": 0
  },
  "config_tasks": {
    "community|alghafa:mcq_exams_test_ar": {
      "name": "alghafa:mcq_exams_test_ar",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "mcq_exams_test_ar",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 557,
      "effective_num_docs": 557,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:meta_ar_dialects": {
      "name": "alghafa:meta_ar_dialects",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "meta_ar_dialects",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 5395,
      "effective_num_docs": 5395,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:meta_ar_msa": {
      "name": "alghafa:meta_ar_msa",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "meta_ar_msa",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 895,
      "effective_num_docs": 895,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_facts_truefalse_balanced_task": {
      "name": "alghafa:multiple_choice_facts_truefalse_balanced_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_facts_truefalse_balanced_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 75,
      "effective_num_docs": 75,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_grounded_statement_soqal_task": {
      "name": "alghafa:multiple_choice_grounded_statement_soqal_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_grounded_statement_soqal_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 150,
      "effective_num_docs": 150,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task": {
      "name": "alghafa:multiple_choice_grounded_statement_xglue_mlqa_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_grounded_statement_xglue_mlqa_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 150,
      "effective_num_docs": 150,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task": {
      "name": "alghafa:multiple_choice_rating_sentiment_no_neutral_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_rating_sentiment_no_neutral_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 7995,
      "effective_num_docs": 7995,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_rating_sentiment_task": {
      "name": "alghafa:multiple_choice_rating_sentiment_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_rating_sentiment_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 5995,
      "effective_num_docs": 5995,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|alghafa:multiple_choice_sentiment_task": {
      "name": "alghafa:multiple_choice_sentiment_task",
      "prompt_function": "alghafa_pfn",
      "hf_repo": "OALL/AlGhafa-Arabic-LLM-Benchmark-Native",
      "hf_subset": "multiple_choice_sentiment_task",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 1720,
      "effective_num_docs": 1720,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_exams": {
      "name": "arabic_exams",
      "prompt_function": "arabic_exams_pfn",
      "hf_repo": "OALL/Arabic_EXAMS",
      "hf_subset": "default",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test",
        "validation"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": "validation",
      "few_shots_select": "sequential",
      "generation_size": null,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 537,
      "effective_num_docs": 537,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Accounting (University)": {
      "name": "arabic_mmlu:Accounting (University)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Accounting (University)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 74,
      "effective_num_docs": 74,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Arabic Language (General)": {
      "name": "arabic_mmlu:Arabic Language (General)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Arabic Language (General)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 612,
      "effective_num_docs": 612,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Arabic Language (Grammar)": {
      "name": "arabic_mmlu:Arabic Language (Grammar)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Arabic Language (Grammar)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 365,
      "effective_num_docs": 365,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Arabic Language (High School)": {
      "name": "arabic_mmlu:Arabic Language (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Arabic Language (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 390,
      "effective_num_docs": 390,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Arabic Language (Middle School)": {
      "name": "arabic_mmlu:Arabic Language (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Arabic Language (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 27,
      "effective_num_docs": 27,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Arabic Language (Primary School)": {
      "name": "arabic_mmlu:Arabic Language (Primary School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Arabic Language (Primary School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 252,
      "effective_num_docs": 252,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Biology (High School)": {
      "name": "arabic_mmlu:Biology (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Biology (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 1409,
      "effective_num_docs": 1409,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Civics (High School)": {
      "name": "arabic_mmlu:Civics (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Civics (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 87,
      "effective_num_docs": 87,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Civics (Middle School)": {
      "name": "arabic_mmlu:Civics (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Civics (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 236,
      "effective_num_docs": 236,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Computer Science (High School)": {
      "name": "arabic_mmlu:Computer Science (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Computer Science (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 261,
      "effective_num_docs": 261,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Computer Science (Middle School)": {
      "name": "arabic_mmlu:Computer Science (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Computer Science (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 27,
      "effective_num_docs": 27,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Computer Science (Primary School)": {
      "name": "arabic_mmlu:Computer Science (Primary School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Computer Science (Primary School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 190,
      "effective_num_docs": 190,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Computer Science (University)": {
      "name": "arabic_mmlu:Computer Science (University)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Computer Science (University)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 64,
      "effective_num_docs": 64,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Driving Test": {
      "name": "arabic_mmlu:Driving Test",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Driving Test",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 1211,
      "effective_num_docs": 1211,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Economics (High School)": {
      "name": "arabic_mmlu:Economics (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Economics (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 360,
      "effective_num_docs": 360,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Economics (Middle School)": {
      "name": "arabic_mmlu:Economics (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Economics (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 87,
      "effective_num_docs": 87,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Economics (University)": {
      "name": "arabic_mmlu:Economics (University)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Economics (University)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 137,
      "effective_num_docs": 137,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:General Knowledge": {
      "name": "arabic_mmlu:General Knowledge",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "General Knowledge",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 864,
      "effective_num_docs": 864,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:General Knowledge (Middle School)": {
      "name": "arabic_mmlu:General Knowledge (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "General Knowledge (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 172,
      "effective_num_docs": 172,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:General Knowledge (Primary School)": {
      "name": "arabic_mmlu:General Knowledge (Primary School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "General Knowledge (Primary School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 162,
      "effective_num_docs": 162,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Geography (High School)": {
      "name": "arabic_mmlu:Geography (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Geography (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 1038,
      "effective_num_docs": 1038,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Geography (Middle School)": {
      "name": "arabic_mmlu:Geography (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Geography (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 272,
      "effective_num_docs": 272,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Geography (Primary School)": {
      "name": "arabic_mmlu:Geography (Primary School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Geography (Primary School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 57,
      "effective_num_docs": 57,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:History (High School)": {
      "name": "arabic_mmlu:History (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "History (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 760,
      "effective_num_docs": 760,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:History (Middle School)": {
      "name": "arabic_mmlu:History (Middle School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "History (Middle School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 203,
      "effective_num_docs": 203,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:History (Primary School)": {
      "name": "arabic_mmlu:History (Primary School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "History (Primary School)",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 102,
      "effective_num_docs": 102,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Islamic Studies": {
      "name": "arabic_mmlu:Islamic Studies",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Islamic Studies",
      "metric": [
        {
          "metric_name": "acc_norm",
          "higher_is_better": true,
          "category": "8",
          "use_case": "1",
          "sample_level_fn": "compute",
          "corpus_level_fn": "mean"
        }
      ],
      "hf_revision": null,
      "hf_filter": null,
      "hf_avail_splits": [
        "test"
      ],
      "trust_dataset": true,
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": [
        "dev"
      ],
      "few_shots_select": "sequential",
      "generation_size": -1,
      "generation_grammar": null,
      "stop_sequence": [],
      "num_samples": null,
      "suite": [
        "community"
      ],
      "original_num_docs": 639,
      "effective_num_docs": 639,
      "must_remove_duplicate_docs": false,
      "version": 0
    },
    "community|arabic_mmlu:Islamic Studies (High School)": {
      "name": "arabic_mmlu:Islamic Studies (High School)",
      "prompt_function": "arabic_mmlu_pfn",
      "hf_repo": "MBZUAI/ArabicMMLU",
      "hf_subset": "Islamic Studies (High School)",
      "metric": [
        {
          "metric_name": "acc_norm",
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 334, |
|
"effective_num_docs": 334, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)": { |
|
"name": "arabic_mmlu:Islamic Studies (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Islamic Studies (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 999, |
|
"effective_num_docs": 999, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)": { |
|
"name": "arabic_mmlu:Law (Professional)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Law (Professional)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 314, |
|
"effective_num_docs": 314, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)": { |
|
"name": "arabic_mmlu:Management (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Management (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 75, |
|
"effective_num_docs": 75, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)": { |
|
"name": "arabic_mmlu:Math (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Math (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 409, |
|
"effective_num_docs": 409, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)": { |
|
"name": "arabic_mmlu:Natural Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 242, |
|
"effective_num_docs": 242, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)": { |
|
"name": "arabic_mmlu:Natural Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Natural Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 336, |
|
"effective_num_docs": 336, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)": { |
|
"name": "arabic_mmlu:Philosophy (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Philosophy (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 39, |
|
"effective_num_docs": 39, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)": { |
|
"name": "arabic_mmlu:Physics (High School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Physics (High School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 255, |
|
"effective_num_docs": 255, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)": { |
|
"name": "arabic_mmlu:Political Science (University)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Political Science (University)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 210, |
|
"effective_num_docs": 210, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)": { |
|
"name": "arabic_mmlu:Social Science (Middle School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Middle School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 241, |
|
"effective_num_docs": 241, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)": { |
|
"name": "arabic_mmlu:Social Science (Primary School)", |
|
"prompt_function": "arabic_mmlu_pfn", |
|
"hf_repo": "MBZUAI/ArabicMMLU", |
|
"hf_subset": "Social Science (Primary School)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 705, |
|
"effective_num_docs": 705, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra": { |
|
"name": "arabic_mmlu_ht:abstract_algebra", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "abstract_algebra", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy": { |
|
"name": "arabic_mmlu_ht:anatomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "anatomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 135, |
|
"effective_num_docs": 135, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy": { |
|
"name": "arabic_mmlu_ht:astronomy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "astronomy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 152, |
|
"effective_num_docs": 152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics": { |
|
"name": "arabic_mmlu_ht:business_ethics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "business_ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge": { |
|
"name": "arabic_mmlu_ht:clinical_knowledge", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "clinical_knowledge", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 265, |
|
"effective_num_docs": 265, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology": { |
|
"name": "arabic_mmlu_ht:college_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 144, |
|
"effective_num_docs": 144, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry": { |
|
"name": "arabic_mmlu_ht:college_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science": { |
|
"name": "arabic_mmlu_ht:college_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics": { |
|
"name": "arabic_mmlu_ht:college_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine": { |
|
"name": "arabic_mmlu_ht:college_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 173, |
|
"effective_num_docs": 173, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics": { |
|
"name": "arabic_mmlu_ht:college_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "college_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 102, |
|
"effective_num_docs": 102, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security": { |
|
"name": "arabic_mmlu_ht:computer_security", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "computer_security", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics": { |
|
"name": "arabic_mmlu_ht:conceptual_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "conceptual_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 235, |
|
"effective_num_docs": 235, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics": { |
|
"name": "arabic_mmlu_ht:econometrics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "econometrics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 114, |
|
"effective_num_docs": 114, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering": { |
|
"name": "arabic_mmlu_ht:electrical_engineering", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "electrical_engineering", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 145, |
|
"effective_num_docs": 145, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics": { |
|
"name": "arabic_mmlu_ht:elementary_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "elementary_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 378, |
|
"effective_num_docs": 378, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic": { |
|
"name": "arabic_mmlu_ht:formal_logic", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "formal_logic", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 126, |
|
"effective_num_docs": 126, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts": { |
|
"name": "arabic_mmlu_ht:global_facts", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "global_facts", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology": { |
|
"name": "arabic_mmlu_ht:high_school_biology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_biology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3813, |
|
"effective_num_docs": 3813, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry": { |
|
"name": "arabic_mmlu_ht:high_school_chemistry", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_chemistry", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4016, |
|
"effective_num_docs": 4016, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science": { |
|
"name": "arabic_mmlu_ht:high_school_computer_science", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_computer_science", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history": { |
|
"name": "arabic_mmlu_ht:high_school_european_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_european_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 8152, |
|
"effective_num_docs": 8152, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography": { |
|
"name": "arabic_mmlu_ht:high_school_geography", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_geography", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 198, |
|
"effective_num_docs": 198, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics": { |
|
"name": "arabic_mmlu_ht:high_school_government_and_politics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_government_and_politics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 193, |
|
"effective_num_docs": 193, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_macroeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_macroeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2891, |
|
"effective_num_docs": 2891, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics": { |
|
"name": "arabic_mmlu_ht:high_school_mathematics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_mathematics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 270, |
|
"effective_num_docs": 270, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics": { |
|
"name": "arabic_mmlu_ht:high_school_microeconomics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_microeconomics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 238, |
|
"effective_num_docs": 238, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics": { |
|
"name": "arabic_mmlu_ht:high_school_physics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_physics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 151, |
|
"effective_num_docs": 151, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology": { |
|
"name": "arabic_mmlu_ht:high_school_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 545, |
|
"effective_num_docs": 545, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics": { |
|
"name": "arabic_mmlu_ht:high_school_statistics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_statistics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4232, |
|
"effective_num_docs": 4232, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history": { |
|
"name": "arabic_mmlu_ht:high_school_us_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_us_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 204, |
|
"effective_num_docs": 204, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history": { |
|
"name": "arabic_mmlu_ht:high_school_world_history", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "high_school_world_history", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 237, |
|
"effective_num_docs": 237, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging": { |
|
"name": "arabic_mmlu_ht:human_aging", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_aging", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 223, |
|
"effective_num_docs": 223, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality": { |
|
"name": "arabic_mmlu_ht:human_sexuality", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "human_sexuality", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 131, |
|
"effective_num_docs": 131, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law": { |
|
"name": "arabic_mmlu_ht:international_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "international_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 121, |
|
"effective_num_docs": 121, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence": { |
|
"name": "arabic_mmlu_ht:jurisprudence", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "jurisprudence", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 108, |
|
"effective_num_docs": 108, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies": { |
|
"name": "arabic_mmlu_ht:logical_fallacies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "logical_fallacies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 163, |
|
"effective_num_docs": 163, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning": { |
|
"name": "arabic_mmlu_ht:machine_learning", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "machine_learning", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 112, |
|
"effective_num_docs": 112, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management": { |
|
"name": "arabic_mmlu_ht:management", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "management", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 103, |
|
"effective_num_docs": 103, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing": { |
|
"name": "arabic_mmlu_ht:marketing", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "marketing", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 234, |
|
"effective_num_docs": 234, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics": { |
|
"name": "arabic_mmlu_ht:medical_genetics", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "medical_genetics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous": { |
|
"name": "arabic_mmlu_ht:miscellaneous", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "miscellaneous", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2420, |
|
"effective_num_docs": 2420, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes": { |
|
"name": "arabic_mmlu_ht:moral_disputes", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_disputes", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 346, |
|
"effective_num_docs": 346, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios": { |
|
"name": "arabic_mmlu_ht:moral_scenarios", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "moral_scenarios", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 895, |
|
"effective_num_docs": 895, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition": { |
|
"name": "arabic_mmlu_ht:nutrition", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "nutrition", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 306, |
|
"effective_num_docs": 306, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy": { |
|
"name": "arabic_mmlu_ht:philosophy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "philosophy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 311, |
|
"effective_num_docs": 311, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory": { |
|
"name": "arabic_mmlu_ht:prehistory", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "prehistory", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 324, |
|
"effective_num_docs": 324, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting": { |
|
"name": "arabic_mmlu_ht:professional_accounting", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_accounting", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 4514, |
|
"effective_num_docs": 4514, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law": { |
|
"name": "arabic_mmlu_ht:professional_law", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_law", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 7987, |
|
"effective_num_docs": 7987, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine": { |
|
"name": "arabic_mmlu_ht:professional_medicine", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_medicine", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 1637, |
|
"effective_num_docs": 1637, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology": { |
|
"name": "arabic_mmlu_ht:professional_psychology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "professional_psychology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 3503, |
|
"effective_num_docs": 3503, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations": { |
|
"name": "arabic_mmlu_ht:public_relations", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "public_relations", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 110, |
|
"effective_num_docs": 110, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies": { |
|
"name": "arabic_mmlu_ht:security_studies", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "security_studies", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 245, |
|
"effective_num_docs": 245, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology": { |
|
"name": "arabic_mmlu_ht:sociology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "sociology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 201, |
|
"effective_num_docs": 201, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy": { |
|
"name": "arabic_mmlu_ht:us_foreign_policy", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "us_foreign_policy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 100, |
|
"effective_num_docs": 100, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology": { |
|
"name": "arabic_mmlu_ht:virology", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "virology", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 166, |
|
"effective_num_docs": 166, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions": { |
|
"name": "arabic_mmlu_ht:world_religions", |
|
"prompt_function": "arabic_mmlu_ht_pfn", |
|
"hf_repo": "MBZUAI/human_translated_arabic_mmlu", |
|
"hf_subset": "world_religions", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 171, |
|
"effective_num_docs": 171, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Ethics": { |
|
"name": "aratrust:Ethics", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Ethics", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 60, |
|
"effective_num_docs": 60, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Illegal": { |
|
"name": "aratrust:Illegal", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Illegal", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 53, |
|
"effective_num_docs": 53, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:MentalHealth": { |
|
"name": "aratrust:MentalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "MentalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 76, |
|
"effective_num_docs": 76, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Offensive": { |
|
"name": "aratrust:Offensive", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Offensive", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 69, |
|
"effective_num_docs": 69, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth": { |
|
"name": "aratrust:PhysicalHealth", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "PhysicalHealth", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 73, |
|
"effective_num_docs": 73, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Privacy": { |
|
"name": "aratrust:Privacy", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Privacy", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 57, |
|
"effective_num_docs": 57, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Trustfulness": { |
|
"name": "aratrust:Trustfulness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Trustfulness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 78, |
|
"effective_num_docs": 78, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|aratrust:Unfairness": { |
|
"name": "aratrust:Unfairness", |
|
"prompt_function": "aratrust_pfn", |
|
"hf_repo": "asas-ai/AraTrust-categorized", |
|
"hf_subset": "Unfairness", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 55, |
|
"effective_num_docs": 55, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)": { |
|
"name": "madinah_qa:Arabic Language (General)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (General)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 612, |
|
"effective_num_docs": 612, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)": { |
|
"name": "madinah_qa:Arabic Language (Grammar)", |
|
"prompt_function": "madinah_qa_pfn", |
|
"hf_repo": "MBZUAI/MadinahQA", |
|
"hf_subset": "Arabic Language (Grammar)", |
|
"metric": [ |
|
{ |
|
"metric_name": "acc_norm", |
|
"higher_is_better": true, |
|
"category": "8", |
|
"use_case": "1", |
|
"sample_level_fn": "compute", |
|
"corpus_level_fn": "mean" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"test" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"test" |
|
], |
|
"few_shots_split": [ |
|
"dev" |
|
], |
|
"few_shots_select": "sequential", |
|
"generation_size": -1, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 365, |
|
"effective_num_docs": 365, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
}, |
|
"community|alrage_qa": { |
|
"name": "alrage_qa", |
|
"prompt_function": "qa_prompt_arabic", |
|
"hf_repo": "OALL/ALRAGE", |
|
"hf_subset": null, |
|
"metric": [ |
|
{ |
|
"metric_name": "llm_as_judge", |
|
"higher_is_better": true, |
|
"category": "7", |
|
"use_case": "10", |
|
"sample_level_fn": "_sample_level_fn", |
|
"corpus_level_fn": "aggregate_scores" |
|
} |
|
], |
|
"hf_revision": null, |
|
"hf_filter": null, |
|
"hf_avail_splits": [ |
|
"train" |
|
], |
|
"trust_dataset": true, |
|
"evaluation_splits": [ |
|
"train" |
|
], |
|
"few_shots_split": null, |
|
"few_shots_select": null, |
|
"generation_size": 200, |
|
"generation_grammar": null, |
|
"stop_sequence": [], |
|
"num_samples": null, |
|
"suite": [ |
|
"community" |
|
], |
|
"original_num_docs": 2106, |
|
"effective_num_docs": 2106, |
|
"must_remove_duplicate_docs": false, |
|
"version": 0 |
|
} |
|
}, |
|
"summary_tasks": { |
|
"community|alghafa:mcq_exams_test_ar|0": { |
|
"hashes": { |
|
"hash_examples": "c07a5e78c5c0b8fe", |
|
"hash_full_prompts": "80a0e949a438f854", |
|
"hash_input_tokens": "d3f09a0aecce07bd", |
|
"hash_cont_tokens": "d6519f4be64b0b3f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 557, |
|
"padded": 2228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_dialects|0": { |
|
"hashes": { |
|
"hash_examples": "c0b6081f83e14064", |
|
"hash_full_prompts": "6b2d3701471e4b1f", |
|
"hash_input_tokens": "af2e62ee469ea076", |
|
"hash_cont_tokens": "fe549766a0ce738a" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5395, |
|
"padded": 21580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:meta_ar_msa|0": { |
|
"hashes": { |
|
"hash_examples": "64eb78a7c5b7484b", |
|
"hash_full_prompts": "1773c5ea6c776a5b", |
|
"hash_input_tokens": "8482bef5f7d17dc2", |
|
"hash_cont_tokens": "0349d70fe949b783" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3580, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { |
|
"hashes": { |
|
"hash_examples": "54fc3502c1c02c06", |
|
"hash_full_prompts": "4e908353c5547baa", |
|
"hash_input_tokens": "c56ad6d131b43e05", |
|
"hash_cont_tokens": "65b9e8001c0d9d5e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 150, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { |
|
"hashes": { |
|
"hash_examples": "46572d83696552ae", |
|
"hash_full_prompts": "349c75cbe8ae8c36", |
|
"hash_input_tokens": "227ed26d2ec36181", |
|
"hash_cont_tokens": "707d382e7333be99" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { |
|
"hashes": { |
|
"hash_examples": "f430d97ff715bc1c", |
|
"hash_full_prompts": "dc3ef65ebcc67ea0", |
|
"hash_input_tokens": "38b36dce00a606c1", |
|
"hash_cont_tokens": "b2739963cb832e04" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 150, |
|
"padded": 750, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { |
|
"hashes": { |
|
"hash_examples": "6b70a7416584f98c", |
|
"hash_full_prompts": "d322ffa50c152527", |
|
"hash_input_tokens": "8cede457341ab3b4", |
|
"hash_cont_tokens": "b5f274f703abc5b7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7995, |
|
"padded": 15990, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_rating_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "bc2005cc9d2f436e", |
|
"hash_full_prompts": "dd56faca891efde6", |
|
"hash_input_tokens": "725bb7569e174fd7", |
|
"hash_cont_tokens": "87c18aae5cfd812b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 5995, |
|
"padded": 17921, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alghafa:multiple_choice_sentiment_task|0": { |
|
"hashes": { |
|
"hash_examples": "6fb0e254ea5945d8", |
|
"hash_full_prompts": "a24dad42606fd23e", |
|
"hash_input_tokens": "699956e623bcf18e", |
|
"hash_cont_tokens": "8e03dcc04ffbd0bd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1720, |
|
"padded": 5096, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_exams|0": { |
|
"hashes": { |
|
"hash_examples": "6d721df351722656", |
|
"hash_full_prompts": "f4c27f32ed1304f6", |
|
"hash_input_tokens": "caf792eed509b00e", |
|
"hash_cont_tokens": "a4ef65bc7bab8dcf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 537, |
|
"padded": 2096, |
|
"non_padded": 52, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Accounting (University)|0": { |
|
"hashes": { |
|
"hash_examples": "30e09697562ff9e7", |
|
"hash_full_prompts": "bff0c6fb19ead57e", |
|
"hash_input_tokens": "f19a663735f9322d", |
|
"hash_cont_tokens": "587bf4caea1658f4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 74, |
|
"padded": 256, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "bef69fb8b3b75f28", |
|
"hash_full_prompts": "fc669ccce0759520", |
|
"hash_input_tokens": "6a28053a2b25a9db", |
|
"hash_cont_tokens": "95a234c727b7b43c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2400, |
|
"non_padded": 3, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "bd066a9e6a140a4b", |
|
"hash_full_prompts": "10bc873d02a1412a", |
|
"hash_input_tokens": "559baa6b68e29cd4", |
|
"hash_cont_tokens": "98f874e7446c544b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1545, |
|
"non_padded": 43, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a9c2cd9a9929292a", |
|
"hash_full_prompts": "6724c0709f54c0c8", |
|
"hash_input_tokens": "66c03f048b11322b", |
|
"hash_cont_tokens": "3d676d0d2f081e05" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 390, |
|
"padded": 1505, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "2f8a77bbbd0e21ff", |
|
"hash_full_prompts": "d217019db346cf56", |
|
"hash_input_tokens": "426a8959808010fa", |
|
"hash_cont_tokens": "322ea7667dfc2c2d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 105, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Arabic Language (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5eed3da47822539b", |
|
"hash_full_prompts": "281223ef2463441f", |
|
"hash_input_tokens": "9076d2aac9a47af6", |
|
"hash_cont_tokens": "f3c78f80ddea1519" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 252, |
|
"padded": 918, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Biology (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "91ae6d22a0f0213d", |
|
"hash_full_prompts": "a45c742b98c0bb8e", |
|
"hash_input_tokens": "50d4c0b395256808", |
|
"hash_cont_tokens": "aaa20fdc3c06d2c3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1409, |
|
"padded": 4968, |
|
"non_padded": 88, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "f27bf8791bea2bb9", |
|
"hash_full_prompts": "f45fd30a56e6ef8e", |
|
"hash_input_tokens": "932f832e4462b3e3", |
|
"hash_cont_tokens": "e02c7ebfec7f8df8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 312, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Civics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "74f5bb0098c8916f", |
|
"hash_full_prompts": "2e4ffd2c4ac72585", |
|
"hash_input_tokens": "8db16893ef456b21", |
|
"hash_cont_tokens": "1ffdd9a463183bfa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 236, |
|
"padded": 940, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "a4278d7b525d46fe", |
|
"hash_full_prompts": "d08187d0557a9b3a", |
|
"hash_input_tokens": "83f1a75aa6763f59", |
|
"hash_cont_tokens": "821feca3d9004c98" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 261, |
|
"padded": 994, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "0cb6c07e4b80dfd4", |
|
"hash_full_prompts": "625cae891db27ba4", |
|
"hash_input_tokens": "d75cea8611410ac5", |
|
"hash_cont_tokens": "8b4f299b6f012a83" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 27, |
|
"padded": 100, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "d96fc1bc32473533", |
|
"hash_full_prompts": "20d97795e82f33c2", |
|
"hash_input_tokens": "f7466637f31ff9e7", |
|
"hash_cont_tokens": "1bc67f97b48b9ece" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 190, |
|
"padded": 476, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Computer Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "8835587e436cbaff", |
|
"hash_full_prompts": "d382f30efaa2caf2", |
|
"hash_input_tokens": "5629a924d7f36d93", |
|
"hash_cont_tokens": "e9d871459bc85f62" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 64, |
|
"padded": 247, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Driving Test|0": { |
|
"hashes": { |
|
"hash_examples": "7a4c38a2c451d075", |
|
"hash_full_prompts": "d9e8596b43112533", |
|
"hash_input_tokens": "ba82b293dba08b96", |
|
"hash_cont_tokens": "cd411982b0f12d43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1211, |
|
"padded": 3606, |
|
"non_padded": 79, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c04c252836601279", |
|
"hash_full_prompts": "dd18a221bd0b756a", |
|
"hash_input_tokens": "bf1f14184396c218", |
|
"hash_cont_tokens": "4bda66df90f2d4d8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 360, |
|
"padded": 1374, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "18fba1579406b3cc", |
|
"hash_full_prompts": "bd91e2e91c10dbfb", |
|
"hash_input_tokens": "d71738c696442e46", |
|
"hash_cont_tokens": "3ea283b0f50a72f5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 87, |
|
"padded": 344, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Economics (University)|0": { |
|
"hashes": { |
|
"hash_examples": "7c9e86fba8151562", |
|
"hash_full_prompts": "127e44dd5a0dc334", |
|
"hash_input_tokens": "4ca8786c57c41832", |
|
"hash_cont_tokens": "91cdb256248a5bdf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 137, |
|
"padded": 532, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "acfbe4e1f0314b85", |
|
"hash_full_prompts": "764f51eab95c84fc", |
|
"hash_input_tokens": "48663be3c89b2e30", |
|
"hash_cont_tokens": "76d704fbedbe5ab8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 864, |
|
"padded": 3169, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "03cd0ecf10224316", |
|
"hash_full_prompts": "37ad2c5bf7870920", |
|
"hash_input_tokens": "aab36b0071c5ad3e", |
|
"hash_cont_tokens": "aff2aed9268be2e2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 172, |
|
"padded": 607, |
|
"non_padded": 21, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:General Knowledge (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "c3ee30196e05e122", |
|
"hash_full_prompts": "80e56ef0c04ed68c", |
|
"hash_input_tokens": "47da26c3a042f354", |
|
"hash_cont_tokens": "6c8978669cdc11fb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 162, |
|
"padded": 629, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "e2e329d2bdd9fb7b", |
|
"hash_full_prompts": "a4eb00d949311057", |
|
"hash_input_tokens": "ee3deab0edce6837", |
|
"hash_cont_tokens": "37e2e9c548d8c904" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1038, |
|
"padded": 4052, |
|
"non_padded": 64, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "420b161444291989", |
|
"hash_full_prompts": "a66bba4fc02faba0", |
|
"hash_input_tokens": "f9ff3eb87e5deed2", |
|
"hash_cont_tokens": "5e24bb4c8be23901" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 272, |
|
"padded": 966, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Geography (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "5bc5ca48a4210899", |
|
"hash_full_prompts": "cbda4d0ffc67d06b", |
|
"hash_input_tokens": "20889bf20f5be9ab", |
|
"hash_cont_tokens": "b9f45957a97d1ecf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 216, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "c7cc37f29311bea1", |
|
"hash_full_prompts": "88724da4a11d438f", |
|
"hash_input_tokens": "ccc46ee6e761b396", |
|
"hash_cont_tokens": "b16e65544485acae" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 760, |
|
"padded": 2886, |
|
"non_padded": 76, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "5b9f1973337153a2", |
|
"hash_full_prompts": "2d98d366c39f32a8", |
|
"hash_input_tokens": "1783ce613c2b280f", |
|
"hash_cont_tokens": "e3b355a58a286ee4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 203, |
|
"padded": 734, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:History (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "af2469847007c1fe", |
|
"hash_full_prompts": "df3b23effab097a0", |
|
"hash_input_tokens": "18271792cdfb95ee", |
|
"hash_cont_tokens": "a52a22630c3cb3f7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies|0": { |
|
"hashes": { |
|
"hash_examples": "c8da9b2f16a5ea0f", |
|
"hash_full_prompts": "a8a474da3abfdbdb", |
|
"hash_input_tokens": "6511dc520e600e02", |
|
"hash_cont_tokens": "1866597a67ff4424" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 639, |
|
"padded": 2493, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "efb11bc8ef398117", |
|
"hash_full_prompts": "9165dd4c8e696ae0", |
|
"hash_input_tokens": "1938010fd9bb2a9c", |
|
"hash_cont_tokens": "6b678abb2fd451bd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 334, |
|
"padded": 1281, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "9e33ab030eebdb99", |
|
"hash_full_prompts": "e4fb7447e1b17c38", |
|
"hash_input_tokens": "321fe481eb67b680", |
|
"hash_cont_tokens": "e0c922e595ad51cd" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 867, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Islamic Studies (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "4167565d878b20eb", |
|
"hash_full_prompts": "2bf9da9ac8b969b6", |
|
"hash_input_tokens": "65cb81f2b9c7ca94", |
|
"hash_cont_tokens": "97e2182a63c5686c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 999, |
|
"padded": 2969, |
|
"non_padded": 55, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Law (Professional)|0": { |
|
"hashes": { |
|
"hash_examples": "e77f52c8fe4352b3", |
|
"hash_full_prompts": "35364a07a6ccb7d0", |
|
"hash_input_tokens": "ac7f8f951acf94e3", |
|
"hash_cont_tokens": "324cc46c561b417c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 314, |
|
"padded": 1223, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Management (University)|0": { |
|
"hashes": { |
|
"hash_examples": "09682649b04b7327", |
|
"hash_full_prompts": "1f5a262586618a50", |
|
"hash_input_tokens": "3158a0839753b276", |
|
"hash_cont_tokens": "1e98e1e2cd19a5e3" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 75, |
|
"padded": 200, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Math (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "edb027bfae7e76f1", |
|
"hash_full_prompts": "f41fb4acbf7affae", |
|
"hash_input_tokens": "8614b42d543d9d7d", |
|
"hash_cont_tokens": "632401a080490684" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 409, |
|
"padded": 1290, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "96e72c9094c2364c", |
|
"hash_full_prompts": "d0f723d3ce6ad0de", |
|
"hash_input_tokens": "c145150a9b84ec44", |
|
"hash_cont_tokens": "17e42af5dbb9eee1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 242, |
|
"padded": 924, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Natural Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "69e35bad3dec5a4d", |
|
"hash_full_prompts": "2892d842afda3be6", |
|
"hash_input_tokens": "038e0bae167d7888", |
|
"hash_cont_tokens": "a7423721c9837336" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 336, |
|
"padded": 1206, |
|
"non_padded": 22, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Philosophy (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "dc6ebd484a02fca5", |
|
"hash_full_prompts": "3a82da9c1ad5b47d", |
|
"hash_input_tokens": "36b688f91c5e7c88", |
|
"hash_cont_tokens": "69b31fc6977897bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 39, |
|
"padded": 156, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Physics (High School)|0": { |
|
"hashes": { |
|
"hash_examples": "58a1722472c9e644", |
|
"hash_full_prompts": "085146655d8ef6dc", |
|
"hash_input_tokens": "e5e97832aef238d3", |
|
"hash_cont_tokens": "f9506aa86f66954d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 255, |
|
"padded": 996, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Political Science (University)|0": { |
|
"hashes": { |
|
"hash_examples": "07a4ed6aabbdfd1e", |
|
"hash_full_prompts": "c179a3822417b264", |
|
"hash_input_tokens": "3b48d5302a0061f7", |
|
"hash_cont_tokens": "4799b66f49438465" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 210, |
|
"padded": 688, |
|
"non_padded": 22, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Middle School)|0": { |
|
"hashes": { |
|
"hash_examples": "8ca955902f304664", |
|
"hash_full_prompts": "7fc55350d4083674", |
|
"hash_input_tokens": "187f1a2c21b68ad2", |
|
"hash_cont_tokens": "4602cb88db99312d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 241, |
|
"padded": 919, |
|
"non_padded": 10, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu:Social Science (Primary School)|0": { |
|
"hashes": { |
|
"hash_examples": "934025ab3738123c", |
|
"hash_full_prompts": "ad76f074b992e2e0", |
|
"hash_input_tokens": "37268767c91109b4", |
|
"hash_cont_tokens": "19e973e9f05c9c82" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 705, |
|
"padded": 2004, |
|
"non_padded": 39, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:abstract_algebra|0": { |
|
"hashes": { |
|
"hash_examples": "0b557911f2f6d919", |
|
"hash_full_prompts": "e69f74a47413aebb", |
|
"hash_input_tokens": "6e711cb5c57d1c57", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:anatomy|0": { |
|
"hashes": { |
|
"hash_examples": "a552d8a0ef294061", |
|
"hash_full_prompts": "9a06cc1bb7b5b499", |
|
"hash_input_tokens": "c17e0394c0aea014", |
|
"hash_cont_tokens": "96c000fa61c3bd55" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 135, |
|
"padded": 532, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:astronomy|0": { |
|
"hashes": { |
|
"hash_examples": "c4a372d0af7da098", |
|
"hash_full_prompts": "a94afb4ca0c2d256", |
|
"hash_input_tokens": "36eca2178818df25", |
|
"hash_cont_tokens": "b13cc32205751d90" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 152, |
|
"padded": 604, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:business_ethics|0": { |
|
"hashes": { |
|
"hash_examples": "9f71d816abf8af7a", |
|
"hash_full_prompts": "34d3001ccccff521", |
|
"hash_input_tokens": "8dac1fd007a7a7d2", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:clinical_knowledge|0": { |
|
"hashes": { |
|
"hash_examples": "38303cd765589ef3", |
|
"hash_full_prompts": "1556725f5b45ac71", |
|
"hash_input_tokens": "6f09b9e81271085d", |
|
"hash_cont_tokens": "c771582839d4f30c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 265, |
|
"padded": 1028, |
|
"non_padded": 32, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_biology|0": { |
|
"hashes": { |
|
"hash_examples": "dbd9b5d318e60b04", |
|
"hash_full_prompts": "78f4e9cb9660d3ca", |
|
"hash_input_tokens": "303a9463bf135a83", |
|
"hash_cont_tokens": "ec774ac0d0ad658b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 144, |
|
"padded": 572, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "6f88491d03db8a4c", |
|
"hash_full_prompts": "2d0102fc2e009e99", |
|
"hash_input_tokens": "a1e7811e96d839eb", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "ebfdee5ef2ed5e17", |
|
"hash_full_prompts": "aacfd01dc053d008", |
|
"hash_input_tokens": "85ebfd91849ebff8", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "e3f22cd7712aae2f", |
|
"hash_full_prompts": "86a3d00a93e44a42", |
|
"hash_input_tokens": "82f54c08eedf34c6", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "51a5501373afb5a7", |
|
"hash_full_prompts": "2d611f649df6bd4c", |
|
"hash_input_tokens": "1b803023d54c7e31", |
|
"hash_cont_tokens": "1823a754e6394181" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 173, |
|
"padded": 680, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:college_physics|0": { |
|
"hashes": { |
|
"hash_examples": "2d3e015989b108db", |
|
"hash_full_prompts": "8206c1d53c258116", |
|
"hash_input_tokens": "cf2eb5f1af7c54eb", |
|
"hash_cont_tokens": "ee5dc873d27b9e10" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 102, |
|
"padded": 404, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:computer_security|0": { |
|
"hashes": { |
|
"hash_examples": "f8810eddc38dfee4", |
|
"hash_full_prompts": "aa67ba537bd6d9cb", |
|
"hash_input_tokens": "59a5a043345a3582", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 396, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:conceptual_physics|0": { |
|
"hashes": { |
|
"hash_examples": "211e32cc43c6b1dc", |
|
"hash_full_prompts": "5b26750c0a54b21a", |
|
"hash_input_tokens": "d8bd540a3ef6a733", |
|
"hash_cont_tokens": "b7b580bbcf7e0afa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 235, |
|
"padded": 896, |
|
"non_padded": 44, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:econometrics|0": { |
|
"hashes": { |
|
"hash_examples": "810023786b2484d2", |
|
"hash_full_prompts": "1554f608d8cbebad", |
|
"hash_input_tokens": "e106e89bdd24d92b", |
|
"hash_cont_tokens": "d44932b2a931e093" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 114, |
|
"padded": 452, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:electrical_engineering|0": { |
|
"hashes": { |
|
"hash_examples": "a222760c93eaa1ee", |
|
"hash_full_prompts": "69acb104ef4c55c2", |
|
"hash_input_tokens": "2c5165427788a83e", |
|
"hash_cont_tokens": "159f4cb1232d2a3c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 145, |
|
"padded": 544, |
|
"non_padded": 36, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:elementary_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "4c069aeee64dc227", |
|
"hash_full_prompts": "f53b58db6fd4b44a", |
|
"hash_input_tokens": "62ffa13c49ee7cc1", |
|
"hash_cont_tokens": "2bf44b70baf49dfa" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 378, |
|
"padded": 1500, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:formal_logic|0": { |
|
"hashes": { |
|
"hash_examples": "3cb0ccbf8e8a77ae", |
|
"hash_full_prompts": "c4a28d323f6cb522", |
|
"hash_input_tokens": "11aacf903a622bce", |
|
"hash_cont_tokens": "8765c45f4711ebb8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 126, |
|
"padded": 504, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:global_facts|0": { |
|
"hashes": { |
|
"hash_examples": "c1d039e64ea321b9", |
|
"hash_full_prompts": "e5fed902d9227552", |
|
"hash_input_tokens": "d873ca93fd6c6b91", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 388, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_biology|0": { |
|
"hashes": { |
|
"hash_examples": "ddcb8237bb4ba08a", |
|
"hash_full_prompts": "719c7f7a3c6baac4", |
|
"hash_input_tokens": "af59b8d4bceed933", |
|
"hash_cont_tokens": "49908817551a4513" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3813, |
|
"padded": 15100, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_chemistry|0": { |
|
"hashes": { |
|
"hash_examples": "07061b55c5c436d9", |
|
"hash_full_prompts": "1527546cad57c6e8", |
|
"hash_input_tokens": "9ff0cdee4c281bd4", |
|
"hash_cont_tokens": "a7f16a586e1cfe0f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4016, |
|
"padded": 15912, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_computer_science|0": { |
|
"hashes": { |
|
"hash_examples": "8d3405483d5fdcff", |
|
"hash_full_prompts": "52bddb525d63cfb6", |
|
"hash_input_tokens": "e868dd410413fc05", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 400, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_european_history|0": { |
|
"hashes": { |
|
"hash_examples": "031c49a430356414", |
|
"hash_full_prompts": "a776b57861a8f770", |
|
"hash_input_tokens": "ecb5dc4cd387224f", |
|
"hash_cont_tokens": "5420388845898571" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 8152, |
|
"padded": 32448, |
|
"non_padded": 160, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_geography|0": { |
|
"hashes": { |
|
"hash_examples": "d0ce2b019a66c1de", |
|
"hash_full_prompts": "c2feec8be4e1ff30", |
|
"hash_input_tokens": "9e47f5696a52bb45", |
|
"hash_cont_tokens": "fa4a2c8384dfaaa5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 198, |
|
"padded": 768, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_government_and_politics|0": { |
|
"hashes": { |
|
"hash_examples": "7d7c6d476d0576b1", |
|
"hash_full_prompts": "a425a9deee8f47b5", |
|
"hash_input_tokens": "24eba74d60fc749f", |
|
"hash_cont_tokens": "682709d2fa91c75e" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 193, |
|
"padded": 768, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_macroeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "694d3a01c6144ddb", |
|
"hash_full_prompts": "ecbc56e8841a440f", |
|
"hash_input_tokens": "456bf4cb3de8cf06", |
|
"hash_cont_tokens": "4f2f97c723cb220f" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2891, |
|
"padded": 11440, |
|
"non_padded": 124, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_mathematics|0": { |
|
"hashes": { |
|
"hash_examples": "004f9c0a40b5ec10", |
|
"hash_full_prompts": "b062392cb5100ded", |
|
"hash_input_tokens": "3bd4bc291deaac3a", |
|
"hash_cont_tokens": "8130a825e5a2ee3d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 270, |
|
"padded": 1072, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_microeconomics|0": { |
|
"hashes": { |
|
"hash_examples": "80cf03d462e6ccbc", |
|
"hash_full_prompts": "85d5dfb962fc6d98", |
|
"hash_input_tokens": "00a458eaf4c3905d", |
|
"hash_cont_tokens": "4f6974070ef28d29" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 238, |
|
"padded": 948, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_physics|0": { |
|
"hashes": { |
|
"hash_examples": "92218def5b383845", |
|
"hash_full_prompts": "1c8559c031860760", |
|
"hash_input_tokens": "c834d7f0dd79fc04", |
|
"hash_cont_tokens": "5d32bcd7ba8252ba" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 151, |
|
"padded": 604, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "323f7848fee32e58", |
|
"hash_full_prompts": "8035a60e22c7171e", |
|
"hash_input_tokens": "dfc509c226d58c54", |
|
"hash_cont_tokens": "1512a6938229952b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 545, |
|
"padded": 2156, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_statistics|0": { |
|
"hashes": { |
|
"hash_examples": "d7bbe0d037cf31ec", |
|
"hash_full_prompts": "6158ad415c6366aa", |
|
"hash_input_tokens": "1dce3c49e184539d", |
|
"hash_cont_tokens": "95cb29e5c31221c8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4232, |
|
"padded": 16776, |
|
"non_padded": 152, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_us_history|0": { |
|
"hashes": { |
|
"hash_examples": "722ec9207e3b0e04", |
|
"hash_full_prompts": "5b4434e41ee5ad44", |
|
"hash_input_tokens": "ecb340482ee154f1", |
|
"hash_cont_tokens": "0c31c2de1e3429bf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 204, |
|
"padded": 816, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:high_school_world_history|0": { |
|
"hashes": { |
|
"hash_examples": "b5eb675d3b578584", |
|
"hash_full_prompts": "9734067c4bba2d90", |
|
"hash_input_tokens": "46ab46716dc915b5", |
|
"hash_cont_tokens": "5e704d9d54138833" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 237, |
|
"padded": 948, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_aging|0": { |
|
"hashes": { |
|
"hash_examples": "713ac79cd2dd2d7b", |
|
"hash_full_prompts": "a7abe635cf733537", |
|
"hash_input_tokens": "e18a38481612d32f", |
|
"hash_cont_tokens": "e5a3e63957647f04" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 223, |
|
"padded": 868, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:human_sexuality|0": { |
|
"hashes": { |
|
"hash_examples": "47551ab4e5dcf6c5", |
|
"hash_full_prompts": "1f816ff56cc17aa6", |
|
"hash_input_tokens": "db0c5bad1e83ddeb", |
|
"hash_cont_tokens": "90a9b6d1231332f4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 131, |
|
"padded": 512, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:international_law|0": { |
|
"hashes": { |
|
"hash_examples": "da360336943398d5", |
|
"hash_full_prompts": "fb06b1e1341ef7fd", |
|
"hash_input_tokens": "38228e160d4d0a2d", |
|
"hash_cont_tokens": "9ab33ab519d55748" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 121, |
|
"padded": 484, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:jurisprudence|0": { |
|
"hashes": { |
|
"hash_examples": "661d161a486fb035", |
|
"hash_full_prompts": "c1f348bac6d3e718", |
|
"hash_input_tokens": "c8da9c3ac91931b3", |
|
"hash_cont_tokens": "2cecb6db3790a23b" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 108, |
|
"padded": 432, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:logical_fallacies|0": { |
|
"hashes": { |
|
"hash_examples": "5c3926384758bda7", |
|
"hash_full_prompts": "0cde216cf861d61f", |
|
"hash_input_tokens": "9ab1c06e76ca065f", |
|
"hash_cont_tokens": "f5c60e363dd9fc3d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 163, |
|
"padded": 640, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:machine_learning|0": { |
|
"hashes": { |
|
"hash_examples": "3ce756e6a22ffc48", |
|
"hash_full_prompts": "2e571d1b5320c81f", |
|
"hash_input_tokens": "d62f3caae043891b", |
|
"hash_cont_tokens": "d41e7e44237c0a16" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 112, |
|
"padded": 444, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:management|0": { |
|
"hashes": { |
|
"hash_examples": "20fe769bb3276832", |
|
"hash_full_prompts": "addddee958040e66", |
|
"hash_input_tokens": "85f691fc9e7cbeca", |
|
"hash_cont_tokens": "372864196dbb4cad" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 103, |
|
"padded": 396, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:marketing|0": { |
|
"hashes": { |
|
"hash_examples": "6b19449559d987ce", |
|
"hash_full_prompts": "fcc1001dcd8658fe", |
|
"hash_input_tokens": "26f784a6ce977c5c", |
|
"hash_cont_tokens": "ad74b6b4e88f6100" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 234, |
|
"padded": 932, |
|
"non_padded": 4, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:medical_genetics|0": { |
|
"hashes": { |
|
"hash_examples": "cbb0fa9df0f5435a", |
|
"hash_full_prompts": "b9d85ecf87796575", |
|
"hash_input_tokens": "ea8dc4bea07cd647", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 384, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:miscellaneous|0": { |
|
"hashes": { |
|
"hash_examples": "0a4134046c23cff9", |
|
"hash_full_prompts": "d46469b4de083b50", |
|
"hash_input_tokens": "9bcf6b52695255e0", |
|
"hash_cont_tokens": "f2831dc319b7001c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 2420, |
|
"padded": 9580, |
|
"non_padded": 100, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_disputes|0": { |
|
"hashes": { |
|
"hash_examples": "1ac8a0967c82caa0", |
|
"hash_full_prompts": "7ccf1d3ae4eab339", |
|
"hash_input_tokens": "77bda4e75f885c2e", |
|
"hash_cont_tokens": "d6a32c4f89ec0e43" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 346, |
|
"padded": 1368, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:moral_scenarios|0": { |
|
"hashes": { |
|
"hash_examples": "2c0670188bc5a789", |
|
"hash_full_prompts": "44466154f6fbec71", |
|
"hash_input_tokens": "0033e9e48b76b58d", |
|
"hash_cont_tokens": "0abad6841e9b5dc1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 895, |
|
"padded": 3504, |
|
"non_padded": 76, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:nutrition|0": { |
|
"hashes": { |
|
"hash_examples": "658628c0dcdfe201", |
|
"hash_full_prompts": "2ebbbdaea80d9c0f", |
|
"hash_input_tokens": "40140e9accd9cd7c", |
|
"hash_cont_tokens": "1947ff415070dfa5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 306, |
|
"padded": 1208, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:philosophy|0": { |
|
"hashes": { |
|
"hash_examples": "8b6707b322affafd", |
|
"hash_full_prompts": "72b1804dd62e9842", |
|
"hash_input_tokens": "19c0e79bd4b4e0e2", |
|
"hash_cont_tokens": "566ed263a8423f58" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 311, |
|
"padded": 1224, |
|
"non_padded": 20, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:prehistory|0": { |
|
"hashes": { |
|
"hash_examples": "0c85ffcdc9a7b367", |
|
"hash_full_prompts": "70d2d35428e8ed51", |
|
"hash_input_tokens": "5f782fa8675294ab", |
|
"hash_cont_tokens": "69725bb3099f23d0" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 324, |
|
"padded": 1272, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_accounting|0": { |
|
"hashes": { |
|
"hash_examples": "cce1ea2d5f544b2f", |
|
"hash_full_prompts": "1bef23608081ff7b", |
|
"hash_input_tokens": "747b2bc31216794b", |
|
"hash_cont_tokens": "25802ac32c51a7f7" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 4514, |
|
"padded": 17819, |
|
"non_padded": 237, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_law|0": { |
|
"hashes": { |
|
"hash_examples": "1c654b024b54eb4b", |
|
"hash_full_prompts": "6525f44784846cab", |
|
"hash_input_tokens": "e8e5c5a639074d57", |
|
"hash_cont_tokens": "7f2b1b7218a1ef40" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 7987, |
|
"padded": 31596, |
|
"non_padded": 352, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_medicine|0": { |
|
"hashes": { |
|
"hash_examples": "c621eaacfa662ebc", |
|
"hash_full_prompts": "7875238389eeca83", |
|
"hash_input_tokens": "01a33e8e7758db74", |
|
"hash_cont_tokens": "e1afe1503a5d02c5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 1637, |
|
"padded": 6476, |
|
"non_padded": 72, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:professional_psychology|0": { |
|
"hashes": { |
|
"hash_examples": "bc14a28eaec87dc4", |
|
"hash_full_prompts": "3e80ba9be5ad5629", |
|
"hash_input_tokens": "f7e1824312d780aa", |
|
"hash_cont_tokens": "1c41f3eeadeec685" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 3503, |
|
"padded": 13760, |
|
"non_padded": 252, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:public_relations|0": { |
|
"hashes": { |
|
"hash_examples": "de4989d9375885c4", |
|
"hash_full_prompts": "79e175750959cd9e", |
|
"hash_input_tokens": "f4ccfcb9cfbde82c", |
|
"hash_cont_tokens": "3914ab4a5d5b69e8" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 110, |
|
"padded": 432, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:security_studies|0": { |
|
"hashes": { |
|
"hash_examples": "3f84bfeec717c6de", |
|
"hash_full_prompts": "5bf146758e2e0263", |
|
"hash_input_tokens": "eb687352b99682d9", |
|
"hash_cont_tokens": "1d2b199df736dea4" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 245, |
|
"padded": 972, |
|
"non_padded": 8, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:sociology|0": { |
|
"hashes": { |
|
"hash_examples": "10d7c2fae10bfcbc", |
|
"hash_full_prompts": "fa261fced60e82fc", |
|
"hash_input_tokens": "ad9817584d4b06b5", |
|
"hash_cont_tokens": "3556cb090eda6dec" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 201, |
|
"padded": 788, |
|
"non_padded": 16, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:us_foreign_policy|0": { |
|
"hashes": { |
|
"hash_examples": "bb05f02c38ddaf1a", |
|
"hash_full_prompts": "0dfe513238bd0061", |
|
"hash_input_tokens": "4543d4664e04df4b", |
|
"hash_cont_tokens": "d35519013f781909" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 100, |
|
"padded": 376, |
|
"non_padded": 24, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:virology|0": { |
|
"hashes": { |
|
"hash_examples": "290915a48884ede2", |
|
"hash_full_prompts": "694770ed73f5c26a", |
|
"hash_input_tokens": "3142244bcd5c70fb", |
|
"hash_cont_tokens": "cbf93f8f3bd5c82c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 166, |
|
"padded": 636, |
|
"non_padded": 28, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|arabic_mmlu_ht:world_religions|0": { |
|
"hashes": { |
|
"hash_examples": "91cc5451c7284f75", |
|
"hash_full_prompts": "71844a79ee0d8a3d", |
|
"hash_input_tokens": "5934735499cae450", |
|
"hash_cont_tokens": "b5fbc024ac54a858" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 171, |
|
"padded": 672, |
|
"non_padded": 12, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (General)|0": { |
|
"hashes": { |
|
"hash_examples": "25bf94d05f737b63", |
|
"hash_full_prompts": "c008c6a31878aba3", |
|
"hash_input_tokens": "b43a8e4b97ffdd55", |
|
"hash_cont_tokens": "05d3f2bc980e6cbb" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 612, |
|
"padded": 2394, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|madinah_qa:Arabic Language (Grammar)|0": { |
|
"hashes": { |
|
"hash_examples": "e65fe4df843f4380", |
|
"hash_full_prompts": "80f2c5dc9ce14f9f", |
|
"hash_input_tokens": "91b27fbe8e7d58f8", |
|
"hash_cont_tokens": "ac1327c8a93a78f2" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 365, |
|
"padded": 1581, |
|
"non_padded": 7, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Ethics|0": { |
|
"hashes": { |
|
"hash_examples": "5d32da36271c5eb4", |
|
"hash_full_prompts": "bf216386871f979e", |
|
"hash_input_tokens": "f690f29461a1e944", |
|
"hash_cont_tokens": "67fe5dc315ef723c" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 60, |
|
"padded": 180, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Illegal|0": { |
|
"hashes": { |
|
"hash_examples": "0c07f1f100f2d0e8", |
|
"hash_full_prompts": "e8685254a9800856", |
|
"hash_input_tokens": "d6551c85717af490", |
|
"hash_cont_tokens": "2cc82a58b4d87abc" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 53, |
|
"padded": 159, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:MentalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "8e5fc5c4704bd96b", |
|
"hash_full_prompts": "7bda37378742ecfe", |
|
"hash_input_tokens": "2fb6323bf541acf9", |
|
"hash_cont_tokens": "7b399d0f0a9124f1" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 76, |
|
"padded": 228, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Offensive|0": { |
|
"hashes": { |
|
"hash_examples": "5ad4369b7dc5de46", |
|
"hash_full_prompts": "5fb2efd23e5ac881", |
|
"hash_input_tokens": "1ae6e9359e2a8121", |
|
"hash_cont_tokens": "0cd5015bc3370adf" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 69, |
|
"padded": 207, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:PhysicalHealth|0": { |
|
"hashes": { |
|
"hash_examples": "dc2a632e2dcc86db", |
|
"hash_full_prompts": "bf54e2b8d0999424", |
|
"hash_input_tokens": "ceaed241535ff3d2", |
|
"hash_cont_tokens": "cb8655dcad91858d" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 73, |
|
"padded": 210, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Privacy|0": { |
|
"hashes": { |
|
"hash_examples": "295e35448a39e003", |
|
"hash_full_prompts": "e7c7f61380b9f5b3", |
|
"hash_input_tokens": "8049fc3116330485", |
|
"hash_cont_tokens": "7f23416c661e2ee5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 57, |
|
"padded": 162, |
|
"non_padded": 9, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Trustfulness|0": { |
|
"hashes": { |
|
"hash_examples": "e79ac1ea5439e623", |
|
"hash_full_prompts": "d3bde62ee380527f", |
|
"hash_input_tokens": "75bbb8b258a7b68e", |
|
"hash_cont_tokens": "ff874dba360c1ede" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 78, |
|
"padded": 228, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|aratrust:Unfairness|0": { |
|
"hashes": { |
|
"hash_examples": "4ac5dccbfbdc5077", |
|
"hash_full_prompts": "88e12633a1dbe81d", |
|
"hash_input_tokens": "a7928aaebb5f5acc", |
|
"hash_cont_tokens": "3e990fe3a474dbc5" |
|
}, |
|
"truncated": 0, |
|
"non_truncated": 55, |
|
"padded": 159, |
|
"non_padded": 6, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
}, |
|
"community|alrage_qa|0": { |
|
"hashes": { |
|
"hash_examples": "3edbbe22cabd4160", |
|
"hash_full_prompts": "91c8d5ed9f9796ea", |
|
"hash_input_tokens": "338c70b76f6963cd", |
|
"hash_cont_tokens": "2648dd87f1c883fe" |
|
}, |
|
"truncated": 2106, |
|
"non_truncated": 0, |
|
"padded": 2106, |
|
"non_padded": 0, |
|
"effective_few_shots": 0.0, |
|
"num_truncated_few_shots": 0 |
|
} |
|
}, |
|
"summary_general": { |
|
"hashes": { |
|
"hash_examples": "b8b3b49631adcc40", |
|
"hash_full_prompts": "a49e21e87f93468f", |
|
"hash_input_tokens": "32d81b169eeb90da", |
|
"hash_cont_tokens": "2ec3ba0ed11e4f71" |
|
}, |
|
"truncated": 219, |
|
"non_truncated": 91643, |
|
"padded": 333440, |
|
"non_padded": 3444, |
|
"num_truncated_few_shots": 0 |
|
} |
|
} |