{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 1227818.26736698,
        "end_time": 1230138.972465043,
        "total_evaluation_time_secondes": "2320.7050980629865",
        "model_name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
        "model_sha": "1e637f2d7cb0a9d6fb1922f305cb784995190a83",
        "model_dtype": "torch.bfloat16",
        "model_size": "87.49 GB",
        "config": null
    },
    "results": {
        "harness|bbh:causal_judgment|3": {
            "em": 0.26737967914438504,
            "em_stderr": 0.032452428900352305,
            "qem": 0.2994652406417112,
            "qem_stderr": 0.033583935154759645,
            "pem": 0.6470588235294118,
            "pem_stderr": 0.03504019983419237,
            "pqem": 0.6470588235294118,
            "pqem_stderr": 0.03504019983419237,
            "perfect_em": 0.26737967914438504,
            "perfect_em_stderr": 0.032452428900352305
        },
        "harness|bbh:date_understanding|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.2,
            "qem_stderr": 0.02534897002097908,
            "pem": 0.216,
            "pem_stderr": 0.02607865766373272,
            "pqem": 0.64,
            "pqem_stderr": 0.03041876402517499,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:disambiguation_qa|3": {
            "em": 0.016,
            "em_stderr": 0.00795166118887434,
            "qem": 0.156,
            "qem_stderr": 0.022995023034068755,
            "pem": 0.22,
            "pem_stderr": 0.026251792824605834,
            "pqem": 0.74,
            "pqem_stderr": 0.027797315752644308,
            "perfect_em": 0.016,
            "perfect_em_stderr": 0.00795166118887434
        },
        "harness|bbh:geometric_shapes|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.308,
            "qem_stderr": 0.029256928606501864,
            "pem": 0.008,
            "pem_stderr": 0.005645483676690174,
            "pqem": 0.368,
            "pqem_stderr": 0.03056207062099316,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:logical_deduction_five_objects|3": {
            "em": 0.02,
            "em_stderr": 0.008872139507342683,
            "qem": 0.284,
            "qem_stderr": 0.02857695873043741,
            "pem": 0.056,
            "pem_stderr": 0.014570697336899599,
            "pqem": 0.448,
            "pqem_stderr": 0.03151438761115355,
            "perfect_em": 0.02,
            "perfect_em_stderr": 0.008872139507342683
        },
        "harness|bbh:logical_deduction_seven_objects|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.304,
            "qem_stderr": 0.029150213374159677,
            "pem": 0.0,
            "pem_stderr": 0.0,
            "pqem": 0.452,
            "pqem_stderr": 0.03153986449255662,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:logical_deduction_three_objects|3": {
            "em": 0.036,
            "em_stderr": 0.011805655169278133,
            "qem": 0.428,
            "qem_stderr": 0.031355968923772626,
            "pem": 0.072,
            "pem_stderr": 0.016381005750490108,
            "pqem": 0.632,
            "pqem_stderr": 0.030562070620993163,
            "perfect_em": 0.036,
            "perfect_em_stderr": 0.011805655169278133
        },
        "harness|bbh:movie_recommendation|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.4779116465863454,
            "qem_stderr": 0.03171903523348456,
            "pem": 0.14457831325301204,
            "pem_stderr": 0.022331395571821913,
            "pqem": 0.7751004016064257,
            "pqem_stderr": 0.02651230458673727,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:navigate|3": {
            "em": 0.224,
            "em_stderr": 0.02642136168734791,
            "qem": 0.224,
            "qem_stderr": 0.02642136168734791,
            "pem": 0.648,
            "pem_stderr": 0.030266288057359925,
            "pqem": 0.648,
            "pqem_stderr": 0.030266288057359925,
            "perfect_em": 0.224,
            "perfect_em_stderr": 0.02642136168734791
        },
        "harness|bbh:reasoning_about_colored_objects|3": {
            "em": 0.18,
            "em_stderr": 0.024346890650293548,
            "qem": 0.5,
            "qem_stderr": 0.031686212526223896,
            "pem": 0.232,
            "pem_stderr": 0.02675007037486516,
            "pqem": 0.62,
            "pqem_stderr": 0.030760116042626046,
            "perfect_em": 0.18,
            "perfect_em_stderr": 0.024346890650293548
        },
        "harness|bbh:ruin_names|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.4475806451612903,
            "qem_stderr": 0.03163891746142309,
            "pem": 0.016129032258064516,
            "pem_stderr": 0.008015391715832133,
            "pqem": 0.6048387096774194,
            "pqem_stderr": 0.03110702726972493,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:salient_translation_error_detection|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.328,
            "qem_stderr": 0.02975239182447538,
            "pem": 0.012,
            "pem_stderr": 0.0069003230236943,
            "pqem": 0.564,
            "pqem_stderr": 0.03142556706028129,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:snarks|3": {
            "em": 0.0,
            "em_stderr": 0.0,
            "qem": 0.6348314606741573,
            "qem_stderr": 0.03619005678691266,
            "pem": 0.0,
            "pem_stderr": 0.0,
            "pqem": 0.7584269662921348,
            "pqem_stderr": 0.0321732161383325,
            "perfect_em": 0.0,
            "perfect_em_stderr": 0.0
        },
        "harness|bbh:sports_understanding|3": {
            "em": 0.216,
            "em_stderr": 0.026078657663732727,
            "qem": 0.22,
            "qem_stderr": 0.02625179282460584,
            "pem": 0.72,
            "pem_stderr": 0.02845414827783232,
            "pqem": 0.72,
            "pqem_stderr": 0.02845414827783232,
            "perfect_em": 0.216,
            "perfect_em_stderr": 0.026078657663732727
        },
        "harness|bbh:temporal_sequences|3": {
            "em": 0.016,
            "em_stderr": 0.007951661188874328,
            "qem": 0.36,
            "qem_stderr": 0.030418764025174974,
            "pem": 0.116,
            "pem_stderr": 0.02029342980308387,
            "pqem": 0.716,
            "pqem_stderr": 0.028576958730437398,
            "perfect_em": 0.016,
            "perfect_em_stderr": 0.007951661188874328
        },
        "harness|bbh:tracking_shuffled_objects_five_objects|3": {
            "em": 0.044,
            "em_stderr": 0.012997373846574964,
            "qem": 0.148,
            "qem_stderr": 0.022503547243806144,
            "pem": 0.08,
            "pem_stderr": 0.017192507941462983,
            "pqem": 0.356,
            "pqem_stderr": 0.03034368065715322,
            "perfect_em": 0.044,
            "perfect_em_stderr": 0.012997373846574964
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
            "em": 0.08,
            "em_stderr": 0.01719250794146297,
            "qem": 0.144,
            "qem_stderr": 0.022249407735450203,
            "pem": 0.144,
            "pem_stderr": 0.022249407735450207,
            "pqem": 0.34,
            "pqem_stderr": 0.030020073605457907,
            "perfect_em": 0.08,
            "perfect_em_stderr": 0.01719250794146297
        },
        "harness|bbh:tracking_shuffled_objects_three_objects|3": {
            "em": 0.088,
            "em_stderr": 0.017953084777052892,
            "qem": 0.208,
            "qem_stderr": 0.02572139890141639,
            "pem": 0.184,
            "pem_stderr": 0.02455581299422256,
            "pqem": 0.556,
            "pqem_stderr": 0.031486849425545735,
            "perfect_em": 0.088,
            "perfect_em_stderr": 0.017953084777052892
        },
        "harness|bbh:_average|3": {
            "em": 0.06596553773024362,
            "em_stderr": 0.010779079028954822,
            "qem": 0.31509938850352803,
            "qem_stderr": 0.028601160227500002,
            "pem": 0.1953203427244716,
            "pem_stderr": 0.018387589587902008,
            "pqem": 0.5880791611725217,
            "pqem_stderr": 0.03047560571162204,
            "perfect_em": 0.06596553773024362,
            "perfect_em_stderr": 0.010779079028954822
        }
    },
    "versions": {
        "harness|bbh:causal_judgment|3": 0,
        "harness|bbh:date_understanding|3": 0,
        "harness|bbh:disambiguation_qa|3": 0,
        "harness|bbh:geometric_shapes|3": 0,
        "harness|bbh:logical_deduction_five_objects|3": 0,
        "harness|bbh:logical_deduction_seven_objects|3": 0,
        "harness|bbh:logical_deduction_three_objects|3": 0,
        "harness|bbh:movie_recommendation|3": 0,
        "harness|bbh:navigate|3": 0,
        "harness|bbh:reasoning_about_colored_objects|3": 0,
        "harness|bbh:ruin_names|3": 0,
        "harness|bbh:salient_translation_error_detection|3": 0,
        "harness|bbh:snarks|3": 0,
        "harness|bbh:sports_understanding|3": 0,
        "harness|bbh:temporal_sequences|3": 0,
        "harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
        "harness|bbh:tracking_shuffled_objects_three_objects|3": 0
    },
    "config_tasks": {
        "harness|bbh:causal_judgment": {
            "name": "bbh:causal_judgment",
            "prompt_function": "bbh_causal_judgment",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "causal_judgement",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 187,
            "effective_num_docs": 187,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:date_understanding": {
            "name": "bbh:date_understanding",
            "prompt_function": "bbh_date_understanding",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "date_understanding",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:disambiguation_qa": {
            "name": "bbh:disambiguation_qa",
            "prompt_function": "bbh_disambiguation_qa",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "disambiguation_qa",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:geometric_shapes": {
            "name": "bbh:geometric_shapes",
            "prompt_function": "bbh_geometric_shapes",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "geometric_shapes",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_five_objects": {
            "name": "bbh:logical_deduction_five_objects",
            "prompt_function": "bbh_logical_deduction_five_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_five_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_seven_objects": {
            "name": "bbh:logical_deduction_seven_objects",
            "prompt_function": "bbh_logical_deduction_seven_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_seven_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:logical_deduction_three_objects": {
            "name": "bbh:logical_deduction_three_objects",
            "prompt_function": "bbh_logical_deduction_three_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "logical_deduction_three_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:movie_recommendation": {
            "name": "bbh:movie_recommendation",
            "prompt_function": "bbh_movie_recommendation",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "movie_recommendation",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 249,
            "effective_num_docs": 249,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:navigate": {
            "name": "bbh:navigate",
            "prompt_function": "bbh_navigate",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "navigate",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:reasoning_about_colored_objects": {
            "name": "bbh:reasoning_about_colored_objects",
            "prompt_function": "bbh_reasoning_about_colored_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "reasoning_about_colored_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:ruin_names": {
            "name": "bbh:ruin_names",
            "prompt_function": "bbh_ruin_names",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "ruin_names",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 248,
            "effective_num_docs": 248,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:salient_translation_error_detection": {
            "name": "bbh:salient_translation_error_detection",
            "prompt_function": "bbh_salient_translation_error_detection",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "salient_translation_error_detection",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:snarks": {
            "name": "bbh:snarks",
            "prompt_function": "bbh_snarks",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "snarks",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 178,
            "effective_num_docs": 178,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:sports_understanding": {
            "name": "bbh:sports_understanding",
            "prompt_function": "bbh_sports_understanding",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "sports_understanding",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:temporal_sequences": {
            "name": "bbh:temporal_sequences",
            "prompt_function": "bbh_temporal_sequences",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "temporal_sequences",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_five_objects": {
            "name": "bbh:tracking_shuffled_objects_five_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_five_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_five_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects": {
            "name": "bbh:tracking_shuffled_objects_seven_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_seven_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        },
        "harness|bbh:tracking_shuffled_objects_three_objects": {
            "name": "bbh:tracking_shuffled_objects_three_objects",
            "prompt_function": "bbh_tracking_shuffled_objects_three_objects",
            "hf_repo": "lukaemon/bbh",
            "hf_subset": "tracking_shuffled_objects_three_objects",
            "metric": [
                "exact_match",
                "quasi_exact_match",
                "prefix_exact_match",
                "prefix_quasi_exact_match",
                "perfect_exact_match"
            ],
            "hf_avail_splits": [
                "test"
            ],
            "evaluation_splits": [
                "test"
            ],
            "few_shots_split": null,
            "few_shots_select": null,
            "generation_size": 20,
            "stop_sequence": [
                "</s>",
                "Q:",
                "\n\n"
            ],
            "output_regex": null,
            "frozen": false,
            "suite": [
                "harness"
            ],
            "original_num_docs": 250,
            "effective_num_docs": 250,
            "trust_dataset": true,
            "must_remove_duplicate_docs": null
        }
    },
    "summary_tasks": {
        "harness|bbh:causal_judgment|3": {
            "hashes": {
                "hash_examples": "63218f5ae055ab2b",
                "hash_full_prompts": "c3f54a56ef8b0c84",
                "hash_input_tokens": "7e5e423a80ecc1ce",
                "hash_cont_tokens": "ea7329191884f89a"
            },
            "truncated": 187,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 187,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:date_understanding|3": {
            "hashes": {
                "hash_examples": "f145c7a06def3c8e",
                "hash_full_prompts": "c70878aae99812bd",
                "hash_input_tokens": "6e0f2d719a606df2",
                "hash_cont_tokens": "168747abf2ed1172"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:disambiguation_qa|3": {
            "hashes": {
                "hash_examples": "19677fd1773f7eb9",
                "hash_full_prompts": "27580dbe8f42357e",
                "hash_input_tokens": "28c4ba12a22d1cd2",
                "hash_cont_tokens": "ea97fd2b4e8339e1"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:geometric_shapes|3": {
            "hashes": {
                "hash_examples": "76c7b11a13cc72a9",
                "hash_full_prompts": "c1efb1382b3d5f26",
                "hash_input_tokens": "f6262983a77b3c15",
                "hash_cont_tokens": "27b384bb46462abe"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_five_objects|3": {
            "hashes": {
                "hash_examples": "0e958c856332a745",
                "hash_full_prompts": "841180f101f6ed2f",
                "hash_input_tokens": "4ae5c84ac47f9c00",
                "hash_cont_tokens": "7ee9208c42f0f391"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_seven_objects|3": {
            "hashes": {
                "hash_examples": "ab9de25a5eb40d09",
                "hash_full_prompts": "d4ea65028e06515a",
                "hash_input_tokens": "d12c34ec49852722",
                "hash_cont_tokens": "3ea6682edd9ab88c"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:logical_deduction_three_objects|3": {
            "hashes": {
                "hash_examples": "3c6bf52517714218",
                "hash_full_prompts": "e45ba419a02569cb",
                "hash_input_tokens": "539d01f4b2e6a62e",
                "hash_cont_tokens": "20fdf72b7bdf66c8"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:movie_recommendation|3": {
            "hashes": {
                "hash_examples": "2d9dc4975935d31a",
                "hash_full_prompts": "e7f9f6227ef6d091",
                "hash_input_tokens": "ccf15e0353520262",
                "hash_cont_tokens": "2c887402efd4ba5c"
            },
            "truncated": 249,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 249,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:navigate|3": {
            "hashes": {
                "hash_examples": "ba91dcdb9a064255",
                "hash_full_prompts": "363cbc26d2694856",
                "hash_input_tokens": "6f26d91169f0f9e7",
                "hash_cont_tokens": "f534d356569141cc"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:reasoning_about_colored_objects|3": {
            "hashes": {
                "hash_examples": "a6ba328c4c3385d2",
                "hash_full_prompts": "e2c5ea75faa663be",
                "hash_input_tokens": "9bc9fca7f0afa719",
                "hash_cont_tokens": "6e10e99577f43f65"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:ruin_names|3": {
            "hashes": {
                "hash_examples": "2ef28d5f2d4fdd25",
                "hash_full_prompts": "39c98a33af277e05",
                "hash_input_tokens": "8fc43170201dbcbe",
                "hash_cont_tokens": "13fad13af65299fa"
            },
            "truncated": 248,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 248,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:salient_translation_error_detection|3": {
            "hashes": {
                "hash_examples": "c13f25ec8ffed496",
                "hash_full_prompts": "61c199869236d1d0",
                "hash_input_tokens": "ebfefff744f0c7a3",
                "hash_cont_tokens": "cc08b8755b81929c"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:snarks|3": {
            "hashes": {
                "hash_examples": "5f6db7bff7f6f22e",
                "hash_full_prompts": "70b02bc8db0a7a32",
                "hash_input_tokens": "bae40895da8c14d5",
                "hash_cont_tokens": "0320bef36bbf541f"
            },
            "truncated": 178,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 178,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:sports_understanding|3": {
            "hashes": {
                "hash_examples": "042afbe5d9c1f02d",
                "hash_full_prompts": "6a6bb045bbf84268",
                "hash_input_tokens": "f8b065aed222d461",
                "hash_cont_tokens": "5d577240f3fac015"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:temporal_sequences|3": {
            "hashes": {
                "hash_examples": "803a05f352eb6afc",
                "hash_full_prompts": "95ddd530c1a01713",
                "hash_input_tokens": "fe324a3e155b8686",
                "hash_cont_tokens": "e993b67463ec16da"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_five_objects|3": {
            "hashes": {
                "hash_examples": "2bbac6db7ab0d527",
                "hash_full_prompts": "343105b81cd88d67",
                "hash_input_tokens": "3aa53f00d4db548e",
                "hash_cont_tokens": "9a61bd1425845d55"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
            "hashes": {
                "hash_examples": "845caf093ac2b58c",
                "hash_full_prompts": "4b07759d13b7ab32",
                "hash_input_tokens": "5a5a60741cae444d",
                "hash_cont_tokens": "f8acae0c3d9c3a40"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        },
        "harness|bbh:tracking_shuffled_objects_three_objects|3": {
            "hashes": {
                "hash_examples": "9004f14d5a32b9a8",
                "hash_full_prompts": "95a2d1f682b8e98c",
                "hash_input_tokens": "667b9bc0a831893c",
                "hash_cont_tokens": "13cc9c9f9aa887ea"
            },
            "truncated": 250,
            "non_truncated": 0,
            "padded": 0,
            "non_padded": 250,
            "effective_few_shots": 3.0,
            "num_truncated_few_shots": 0
        }
    },
    "summary_general": {
        "hashes": {
            "hash_examples": "4ff1e3dc5703575d",
            "hash_full_prompts": "cc928e928ada6335",
            "hash_input_tokens": "fa03e97b7a0db38d",
            "hash_cont_tokens": "4e1a33fb6e15f9bc"
        },
        "truncated": 4362,
        "non_truncated": 0,
        "padded": 0,
        "non_padded": 4362,
        "num_truncated_few_shots": 0
    }
}