open-r1-eval-leaderboard/eval_results/HuggingFaceH4/zephyr-7b-beta-ift/v1.5/bbh/results_2024-03-19T09-27-11.722655.json
{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 2786134.983495885,
        "end_time": 2786411.547436556,
        "total_evaluation_time_secondes": "276.5639406708069",
        "model_name": "HuggingFaceH4/zephyr-7b-beta-ift",
        "model_sha": "861515aa7e4d110e576effa7a4e7686ee0c6d807",
        "model_dtype": "torch.bfloat16",
        "model_size": "13.99 GB",
        "config": null
    },
"results": { | |
"harness|bbh:causal_judgment|3": { | |
"em": 0.43315508021390375, | |
"em_stderr": 0.03633267411102587, | |
"qem": 0.43315508021390375, | |
"qem_stderr": 0.03633267411102587, | |
"pem": 0.5614973262032086, | |
"pem_stderr": 0.03638341809400995, | |
"pqem": 0.5614973262032086, | |
"pqem_stderr": 0.03638341809400995, | |
"perfect_em": 0.43315508021390375, | |
"perfect_em_stderr": 0.03633267411102587 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"em": 0.5, | |
"em_stderr": 0.031686212526223896, | |
"qem": 0.5, | |
"qem_stderr": 0.031686212526223896, | |
"pem": 0.5, | |
"pem_stderr": 0.031686212526223896, | |
"pqem": 0.584, | |
"pqem_stderr": 0.031235856237014553, | |
"perfect_em": 0.5, | |
"perfect_em_stderr": 0.031686212526223896 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"em": 0.612, | |
"em_stderr": 0.03088103874899392, | |
"qem": 0.612, | |
"qem_stderr": 0.03088103874899392, | |
"pem": 0.612, | |
"pem_stderr": 0.03088103874899392, | |
"pqem": 0.716, | |
"pqem_stderr": 0.02857695873043741, | |
"perfect_em": 0.612, | |
"perfect_em_stderr": 0.03088103874899392 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"em": 0.24, | |
"em_stderr": 0.027065293652239007, | |
"qem": 0.24, | |
"qem_stderr": 0.027065293652239007, | |
"pem": 0.24, | |
"pem_stderr": 0.027065293652239007, | |
"pqem": 0.24, | |
"pqem_stderr": 0.027065293652239007, | |
"perfect_em": 0.24, | |
"perfect_em_stderr": 0.027065293652239007 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"em": 0.356, | |
"em_stderr": 0.030343680657153215, | |
"qem": 0.356, | |
"qem_stderr": 0.030343680657153215, | |
"pem": 0.356, | |
"pem_stderr": 0.030343680657153215, | |
"pqem": 0.448, | |
"pqem_stderr": 0.03151438761115355, | |
"perfect_em": 0.356, | |
"perfect_em_stderr": 0.030343680657153215 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"em": 0.264, | |
"em_stderr": 0.027934518957690908, | |
"qem": 0.264, | |
"qem_stderr": 0.027934518957690908, | |
"pem": 0.264, | |
"pem_stderr": 0.027934518957690908, | |
"pqem": 0.368, | |
"pqem_stderr": 0.03056207062099316, | |
"perfect_em": 0.264, | |
"perfect_em_stderr": 0.027934518957690908 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"em": 0.532, | |
"em_stderr": 0.031621252575725504, | |
"qem": 0.532, | |
"qem_stderr": 0.031621252575725504, | |
"pem": 0.532, | |
"pem_stderr": 0.031621252575725504, | |
"pqem": 0.74, | |
"pqem_stderr": 0.027797315752644304, | |
"perfect_em": 0.532, | |
"perfect_em_stderr": 0.031621252575725504 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"em": 0.5261044176706827, | |
"em_stderr": 0.0317067307000546, | |
"qem": 0.5261044176706827, | |
"qem_stderr": 0.0317067307000546, | |
"pem": 0.5542168674698795, | |
"pem_stderr": 0.0315628233794839, | |
"pqem": 0.6265060240963856, | |
"pqem_stderr": 0.030716985021643353, | |
"perfect_em": 0.5261044176706827, | |
"perfect_em_stderr": 0.0317067307000546 | |
}, | |
"harness|bbh:navigate|3": { | |
"em": 0.588, | |
"em_stderr": 0.0311915960260229, | |
"qem": 0.588, | |
"qem_stderr": 0.0311915960260229, | |
"pem": 0.588, | |
"pem_stderr": 0.0311915960260229, | |
"pqem": 0.588, | |
"pqem_stderr": 0.0311915960260229, | |
"perfect_em": 0.588, | |
"perfect_em_stderr": 0.0311915960260229 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"em": 0.176, | |
"em_stderr": 0.024133497525457123, | |
"qem": 0.176, | |
"qem_stderr": 0.024133497525457123, | |
"pem": 0.336, | |
"pem_stderr": 0.029933259094191516, | |
"pqem": 0.48, | |
"pqem_stderr": 0.031660853408495185, | |
"perfect_em": 0.176, | |
"perfect_em_stderr": 0.024133497525457123 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"em": 0.49193548387096775, | |
"em_stderr": 0.031810099711288585, | |
"qem": 0.49193548387096775, | |
"qem_stderr": 0.031810099711288585, | |
"pem": 0.4959677419354839, | |
"pem_stderr": 0.0318132035898842, | |
"pqem": 0.5725806451612904, | |
"pqem_stderr": 0.031477261888285146, | |
"perfect_em": 0.49193548387096775, | |
"perfect_em_stderr": 0.031810099711288585 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"em": 0.344, | |
"em_stderr": 0.030104503392316392, | |
"qem": 0.344, | |
"qem_stderr": 0.030104503392316392, | |
"pem": 0.344, | |
"pem_stderr": 0.030104503392316392, | |
"pqem": 0.484, | |
"pqem_stderr": 0.03166998503010742, | |
"perfect_em": 0.344, | |
"perfect_em_stderr": 0.030104503392316392 | |
}, | |
"harness|bbh:snarks|3": { | |
"em": 0.5786516853932584, | |
"em_stderr": 0.037114414059601884, | |
"qem": 0.5786516853932584, | |
"qem_stderr": 0.037114414059601884, | |
"pem": 0.5786516853932584, | |
"pem_stderr": 0.037114414059601884, | |
"pqem": 0.6067415730337079, | |
"pqem_stderr": 0.036715907095165826, | |
"perfect_em": 0.5786516853932584, | |
"perfect_em_stderr": 0.037114414059601884 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"em": 0.256, | |
"em_stderr": 0.0276571087182049, | |
"qem": 0.256, | |
"qem_stderr": 0.0276571087182049, | |
"pem": 0.792, | |
"pem_stderr": 0.025721398901416392, | |
"pqem": 0.792, | |
"pqem_stderr": 0.025721398901416392, | |
"perfect_em": 0.256, | |
"perfect_em_stderr": 0.0276571087182049 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"em": 0.112, | |
"em_stderr": 0.019985536939171433, | |
"qem": 0.112, | |
"qem_stderr": 0.019985536939171433, | |
"pem": 0.112, | |
"pem_stderr": 0.019985536939171433, | |
"pqem": 0.36, | |
"pqem_stderr": 0.03041876402517498, | |
"perfect_em": 0.112, | |
"perfect_em_stderr": 0.019985536939171433 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"em": 0.164, | |
"em_stderr": 0.023465261002076757, | |
"qem": 0.164, | |
"qem_stderr": 0.023465261002076757, | |
"pem": 0.164, | |
"pem_stderr": 0.023465261002076757, | |
"pqem": 0.36, | |
"pqem_stderr": 0.030418764025174995, | |
"perfect_em": 0.164, | |
"perfect_em_stderr": 0.023465261002076757 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"em": 0.1, | |
"em_stderr": 0.01901172751573437, | |
"qem": 0.1, | |
"qem_stderr": 0.01901172751573437, | |
"pem": 0.108, | |
"pem_stderr": 0.019669559381568755, | |
"pqem": 0.24, | |
"pqem_stderr": 0.027065293652239003, | |
"perfect_em": 0.1, | |
"perfect_em_stderr": 0.01901172751573437 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"em": 0.376, | |
"em_stderr": 0.030696336267394594, | |
"qem": 0.376, | |
"qem_stderr": 0.030696336267394594, | |
"pem": 0.396, | |
"pem_stderr": 0.03099319785457785, | |
"pqem": 0.704, | |
"pqem_stderr": 0.028928939388379638, | |
"perfect_em": 0.376, | |
"perfect_em_stderr": 0.030696336267394594 | |
}, | |
"harness|bbh:_average|3": { | |
"em": 0.3694359259527118, | |
"em_stderr": 0.029041193504798654, | |
"qem": 0.3694359259527118, | |
"qem_stderr": 0.029041193504798654, | |
"pem": 0.41857409005565727, | |
"pem_stderr": 0.0293038982684638, | |
"pqem": 0.5261847538052551, | |
"pqem_stderr": 0.030506724953366482, | |
"perfect_em": 0.3694359259527118, | |
"perfect_em_stderr": 0.029041193504798654 | |
} | |
}, | |
"versions": { | |
"harness|bbh:causal_judgment|3": 0, | |
"harness|bbh:date_understanding|3": 0, | |
"harness|bbh:disambiguation_qa|3": 0, | |
"harness|bbh:geometric_shapes|3": 0, | |
"harness|bbh:logical_deduction_five_objects|3": 0, | |
"harness|bbh:logical_deduction_seven_objects|3": 0, | |
"harness|bbh:logical_deduction_three_objects|3": 0, | |
"harness|bbh:movie_recommendation|3": 0, | |
"harness|bbh:navigate|3": 0, | |
"harness|bbh:reasoning_about_colored_objects|3": 0, | |
"harness|bbh:ruin_names|3": 0, | |
"harness|bbh:salient_translation_error_detection|3": 0, | |
"harness|bbh:snarks|3": 0, | |
"harness|bbh:sports_understanding|3": 0, | |
"harness|bbh:temporal_sequences|3": 0, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": 0 | |
}, | |
"config_tasks": { | |
"harness|bbh:causal_judgment": { | |
"name": "bbh:causal_judgment", | |
"prompt_function": "bbh_causal_judgment", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "causal_judgement", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 187, | |
"effective_num_docs": 187, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:date_understanding": { | |
"name": "bbh:date_understanding", | |
"prompt_function": "bbh_date_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "date_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:disambiguation_qa": { | |
"name": "bbh:disambiguation_qa", | |
"prompt_function": "bbh_disambiguation_qa", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "disambiguation_qa", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:geometric_shapes": { | |
"name": "bbh:geometric_shapes", | |
"prompt_function": "bbh_geometric_shapes", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "geometric_shapes", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_five_objects": { | |
"name": "bbh:logical_deduction_five_objects", | |
"prompt_function": "bbh_logical_deduction_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_seven_objects": { | |
"name": "bbh:logical_deduction_seven_objects", | |
"prompt_function": "bbh_logical_deduction_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_three_objects": { | |
"name": "bbh:logical_deduction_three_objects", | |
"prompt_function": "bbh_logical_deduction_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:movie_recommendation": { | |
"name": "bbh:movie_recommendation", | |
"prompt_function": "bbh_movie_recommendation", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "movie_recommendation", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 249, | |
"effective_num_docs": 249, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:navigate": { | |
"name": "bbh:navigate", | |
"prompt_function": "bbh_navigate", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "navigate", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:reasoning_about_colored_objects": { | |
"name": "bbh:reasoning_about_colored_objects", | |
"prompt_function": "bbh_reasoning_about_colored_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "reasoning_about_colored_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:ruin_names": { | |
"name": "bbh:ruin_names", | |
"prompt_function": "bbh_ruin_names", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "ruin_names", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 248, | |
"effective_num_docs": 248, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:salient_translation_error_detection": { | |
"name": "bbh:salient_translation_error_detection", | |
"prompt_function": "bbh_salient_translation_error_detection", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "salient_translation_error_detection", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:snarks": { | |
"name": "bbh:snarks", | |
"prompt_function": "bbh_snarks", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "snarks", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 178, | |
"effective_num_docs": 178, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:sports_understanding": { | |
"name": "bbh:sports_understanding", | |
"prompt_function": "bbh_sports_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "sports_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:temporal_sequences": { | |
"name": "bbh:temporal_sequences", | |
"prompt_function": "bbh_temporal_sequences", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "temporal_sequences", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects": { | |
"name": "bbh:tracking_shuffled_objects_five_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects": { | |
"name": "bbh:tracking_shuffled_objects_seven_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects": { | |
"name": "bbh:tracking_shuffled_objects_three_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
} | |
}, | |
"summary_tasks": { | |
"harness|bbh:causal_judgment|3": { | |
"hashes": { | |
"hash_examples": "63218f5ae055ab2b", | |
"hash_full_prompts": "7303fa1d0fe0b29a", | |
"hash_input_tokens": "79663e73bb5ce6ac", | |
"hash_cont_tokens": "2ae104aa8f463042" | |
}, | |
"truncated": 187, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 187, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"hashes": { | |
"hash_examples": "f145c7a06def3c8e", | |
"hash_full_prompts": "69e60d10afa5a6f1", | |
"hash_input_tokens": "e9bd5760c58a1104", | |
"hash_cont_tokens": "d2ac72662c379649" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"hashes": { | |
"hash_examples": "19677fd1773f7eb9", | |
"hash_full_prompts": "ae0a8fd428f9aee3", | |
"hash_input_tokens": "b3625dcc25d708b2", | |
"hash_cont_tokens": "af88004d58a4a6e9" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"hashes": { | |
"hash_examples": "76c7b11a13cc72a9", | |
"hash_full_prompts": "76633257f67207f9", | |
"hash_input_tokens": "c16e8768d8c9056f", | |
"hash_cont_tokens": "5d48a92cb57803c0" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"hashes": { | |
"hash_examples": "0e958c856332a745", | |
"hash_full_prompts": "3c96645848786efd", | |
"hash_input_tokens": "915443ee37f164dc", | |
"hash_cont_tokens": "b04e092b2e3dc0a2" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "ab9de25a5eb40d09", | |
"hash_full_prompts": "185c5851c101ee66", | |
"hash_input_tokens": "66d532c31ef57236", | |
"hash_cont_tokens": "a3aa21e4f1c88a0f" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"hashes": { | |
"hash_examples": "3c6bf52517714218", | |
"hash_full_prompts": "8ba2d94357e589d0", | |
"hash_input_tokens": "d51c6ad06efbf88b", | |
"hash_cont_tokens": "6ef2fb2696cb3189" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"hashes": { | |
"hash_examples": "2d9dc4975935d31a", | |
"hash_full_prompts": "a411e216d0f5f626", | |
"hash_input_tokens": "e17a3080d43ae54f", | |
"hash_cont_tokens": "915fcaefe88e41d0" | |
}, | |
"truncated": 249, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 249, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:navigate|3": { | |
"hashes": { | |
"hash_examples": "ba91dcdb9a064255", | |
"hash_full_prompts": "ebb3084ecc78a46a", | |
"hash_input_tokens": "90854b0ca565c8f5", | |
"hash_cont_tokens": "1552b672978b1896" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"hashes": { | |
"hash_examples": "a6ba328c4c3385d2", | |
"hash_full_prompts": "38328d016a4ebef3", | |
"hash_input_tokens": "b45b5a8a531e8bf5", | |
"hash_cont_tokens": "00c57b64676b3af7" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"hashes": { | |
"hash_examples": "2ef28d5f2d4fdd25", | |
"hash_full_prompts": "9c7d0493c37182d6", | |
"hash_input_tokens": "627b6058879c9350", | |
"hash_cont_tokens": "d77427847a7d37ee" | |
}, | |
"truncated": 248, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 248, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"hashes": { | |
"hash_examples": "c13f25ec8ffed496", | |
"hash_full_prompts": "edccd4061b168b78", | |
"hash_input_tokens": "7d4d7e481ad8766b", | |
"hash_cont_tokens": "ae056dc903a003cd" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:snarks|3": { | |
"hashes": { | |
"hash_examples": "5f6db7bff7f6f22e", | |
"hash_full_prompts": "31cafd95ab850a44", | |
"hash_input_tokens": "616900bacd0ba7ca", | |
"hash_cont_tokens": "0d6053ff9c4cd010" | |
}, | |
"truncated": 178, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 178, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"hashes": { | |
"hash_examples": "042afbe5d9c1f02d", | |
"hash_full_prompts": "3d46581e9bbec2d0", | |
"hash_input_tokens": "8e9e99c22dd3a8d2", | |
"hash_cont_tokens": "59a36612e096323c" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"hashes": { | |
"hash_examples": "803a05f352eb6afc", | |
"hash_full_prompts": "4a54db144a5dd222", | |
"hash_input_tokens": "24789970b2290dd3", | |
"hash_cont_tokens": "966eba878f6e19f6" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"hashes": { | |
"hash_examples": "2bbac6db7ab0d527", | |
"hash_full_prompts": "e3079106787cc311", | |
"hash_input_tokens": "9036045cff895b08", | |
"hash_cont_tokens": "f1d19d0b07292987" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "845caf093ac2b58c", | |
"hash_full_prompts": "6364e5b860590ec8", | |
"hash_input_tokens": "7100c488aa0764ff", | |
"hash_cont_tokens": "3d4a342951cc3896" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"hashes": { | |
"hash_examples": "9004f14d5a32b9a8", | |
"hash_full_prompts": "01aef56c4d1fe9fe", | |
"hash_input_tokens": "b9690a5d32a586fc", | |
"hash_cont_tokens": "4d169d3a16a49180" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "4ff1e3dc5703575d", | |
"hash_full_prompts": "1cbeab0a00117cb8", | |
"hash_input_tokens": "3608679dab4ce40e", | |
"hash_cont_tokens": "dc02f96cd028fd57" | |
}, | |
"truncated": 4362, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 4362, | |
"num_truncated_few_shots": 0 | |
} | |
} |
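For reference, a minimal Python sketch for reading a report in this format; the local filename below is an assumption, so point it at wherever the file is saved. It loads the JSON and prints each task's prefix quasi-exact-match (pqem) score with its standard error, which also covers the "harness|bbh:_average|3" aggregate entry:

import json

# Minimal sketch: parse a lighteval BBH results report like the one above.
# The filename is an assumption; adjust it to your local copy of the file.
with open("results_2024-03-19T09-27-11.722655.json") as f:
    report = json.load(f)

for task, metrics in report["results"].items():
    # Task keys look like "harness|bbh:causal_judgment|3",
    # where the trailing 3 is the number of few-shot examples.
    name = task.split("|")[1]
    print(f"{name:<45} pqem={metrics['pqem']:.3f} ± {metrics['pqem_stderr']:.3f}")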