{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 2069217.71399371,
        "end_time": 2069504.513928286,
        "total_evaluation_time_secondes": "286.7999345760327",
        "model_name": "Qwen/Qwen1.5-7B-Chat",
        "model_sha": "a2662f4bc1afe913a91cd49f794d229a8c28f97e",
        "model_dtype": "torch.bfloat16",
        "model_size": "14.88 GB",
        "config": null
    },
"results": { | |
"harness|bbh:causal_judgment|3": { | |
"em": 0.5240641711229946, | |
"em_stderr": 0.03661929361528703, | |
"qem": 0.5240641711229946, | |
"qem_stderr": 0.03661929361528703, | |
"pem": 0.5240641711229946, | |
"pem_stderr": 0.03661929361528703, | |
"pqem": 0.5240641711229946, | |
"pqem_stderr": 0.03661929361528703, | |
"perfect_em": 0.5240641711229946, | |
"perfect_em_stderr": 0.03661929361528703 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"em": 0.176, | |
"em_stderr": 0.024133497525457116, | |
"qem": 0.176, | |
"qem_stderr": 0.024133497525457116, | |
"pem": 0.22, | |
"pem_stderr": 0.026251792824605845, | |
"pqem": 0.4, | |
"pqem_stderr": 0.031046021028253247, | |
"perfect_em": 0.176, | |
"perfect_em_stderr": 0.024133497525457116 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"em": 0.404, | |
"em_stderr": 0.031096688184825295, | |
"qem": 0.404, | |
"qem_stderr": 0.031096688184825295, | |
"pem": 0.484, | |
"pem_stderr": 0.03166998503010742, | |
"pqem": 0.776, | |
"pqem_stderr": 0.0264213616873479, | |
"perfect_em": 0.404, | |
"perfect_em_stderr": 0.031096688184825295 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"em": 0.232, | |
"em_stderr": 0.02675007037486516, | |
"qem": 0.232, | |
"qem_stderr": 0.02675007037486516, | |
"pem": 0.252, | |
"pem_stderr": 0.027513851933031352, | |
"pqem": 0.252, | |
"pqem_stderr": 0.027513851933031352, | |
"perfect_em": 0.232, | |
"perfect_em_stderr": 0.02675007037486516 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"em": 0.38, | |
"em_stderr": 0.030760116042626046, | |
"qem": 0.38, | |
"qem_stderr": 0.030760116042626046, | |
"pem": 0.436, | |
"pem_stderr": 0.03142556706028128, | |
"pqem": 0.572, | |
"pqem_stderr": 0.0313559689237726, | |
"perfect_em": 0.38, | |
"perfect_em_stderr": 0.030760116042626046 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"em": 0.284, | |
"em_stderr": 0.028576958730437405, | |
"qem": 0.284, | |
"qem_stderr": 0.028576958730437405, | |
"pem": 0.38, | |
"pem_stderr": 0.03076011604262603, | |
"pqem": 0.488, | |
"pqem_stderr": 0.03167708558254709, | |
"perfect_em": 0.284, | |
"perfect_em_stderr": 0.028576958730437405 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"em": 0.476, | |
"em_stderr": 0.03164968895968782, | |
"qem": 0.476, | |
"qem_stderr": 0.03164968895968782, | |
"pem": 0.52, | |
"pem_stderr": 0.03166085340849519, | |
"pqem": 0.756, | |
"pqem_stderr": 0.027217995464553182, | |
"perfect_em": 0.476, | |
"perfect_em_stderr": 0.03164968895968782 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"em": 0.3092369477911647, | |
"em_stderr": 0.02934839790043843, | |
"qem": 0.3092369477911647, | |
"qem_stderr": 0.02934839790043843, | |
"pem": 0.5863453815261044, | |
"pem_stderr": 0.031273022170585044, | |
"pqem": 0.678714859437751, | |
"pqem_stderr": 0.029652625884384973, | |
"perfect_em": 0.3092369477911647, | |
"perfect_em_stderr": 0.02934839790043843 | |
}, | |
"harness|bbh:navigate|3": { | |
"em": 0.6, | |
"em_stderr": 0.03104602102825325, | |
"qem": 0.6, | |
"qem_stderr": 0.03104602102825325, | |
"pem": 0.6, | |
"pem_stderr": 0.03104602102825325, | |
"pqem": 0.6, | |
"pqem_stderr": 0.03104602102825325, | |
"perfect_em": 0.6, | |
"perfect_em_stderr": 0.03104602102825325 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"em": 0.128, | |
"em_stderr": 0.021172081336336506, | |
"qem": 0.128, | |
"qem_stderr": 0.021172081336336506, | |
"pem": 0.304, | |
"pem_stderr": 0.029150213374159677, | |
"pqem": 0.428, | |
"pqem_stderr": 0.03135596892377261, | |
"perfect_em": 0.128, | |
"perfect_em_stderr": 0.021172081336336506 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"em": 0.3709677419354839, | |
"em_stderr": 0.030736616282226906, | |
"qem": 0.3709677419354839, | |
"qem_stderr": 0.030736616282226906, | |
"pem": 0.40725806451612906, | |
"pem_stderr": 0.03126217550035507, | |
"pqem": 0.5685483870967742, | |
"pqem_stderr": 0.03151383724269122, | |
"perfect_em": 0.3709677419354839, | |
"perfect_em_stderr": 0.030736616282226906 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"em": 0.412, | |
"em_stderr": 0.031191596026022894, | |
"qem": 0.412, | |
"qem_stderr": 0.031191596026022894, | |
"pem": 0.412, | |
"pem_stderr": 0.031191596026022894, | |
"pqem": 0.536, | |
"pqem_stderr": 0.03160397514522374, | |
"perfect_em": 0.412, | |
"perfect_em_stderr": 0.031191596026022894 | |
}, | |
"harness|bbh:snarks|3": { | |
"em": 0.5561797752808989, | |
"em_stderr": 0.03734431584194243, | |
"qem": 0.5561797752808989, | |
"qem_stderr": 0.03734431584194243, | |
"pem": 0.5561797752808989, | |
"pem_stderr": 0.03734431584194243, | |
"pqem": 0.7134831460674157, | |
"pqem_stderr": 0.033984455265461594, | |
"perfect_em": 0.5561797752808989, | |
"perfect_em_stderr": 0.03734431584194243 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"em": 0.548, | |
"em_stderr": 0.03153986449255663, | |
"qem": 0.548, | |
"qem_stderr": 0.03153986449255663, | |
"pem": 0.56, | |
"pem_stderr": 0.031457244522235625, | |
"pqem": 0.56, | |
"pqem_stderr": 0.031457244522235625, | |
"perfect_em": 0.548, | |
"perfect_em_stderr": 0.03153986449255663 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"em": 0.284, | |
"em_stderr": 0.028576958730437394, | |
"qem": 0.284, | |
"qem_stderr": 0.028576958730437394, | |
"pem": 0.44, | |
"pem_stderr": 0.031457244522235715, | |
"pqem": 0.604, | |
"pqem_stderr": 0.030993197854577853, | |
"perfect_em": 0.284, | |
"perfect_em_stderr": 0.028576958730437394 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"em": 0.064, | |
"em_stderr": 0.015510587134374155, | |
"qem": 0.064, | |
"qem_stderr": 0.015510587134374155, | |
"pem": 0.156, | |
"pem_stderr": 0.022995023034068755, | |
"pqem": 0.348, | |
"pqem_stderr": 0.03018656846451171, | |
"perfect_em": 0.064, | |
"perfect_em_stderr": 0.015510587134374155 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"em": 0.104, | |
"em_stderr": 0.019345100974843897, | |
"qem": 0.104, | |
"qem_stderr": 0.019345100974843897, | |
"pem": 0.124, | |
"pem_stderr": 0.02088638225867326, | |
"pqem": 0.26, | |
"pqem_stderr": 0.027797315752644304, | |
"perfect_em": 0.104, | |
"perfect_em_stderr": 0.019345100974843897 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"em": 0.172, | |
"em_stderr": 0.023915513944486218, | |
"qem": 0.172, | |
"qem_stderr": 0.023915513944486218, | |
"pem": 0.352, | |
"pem_stderr": 0.03026628805735993, | |
"pqem": 0.664, | |
"pqem_stderr": 0.029933259094191516, | |
"perfect_em": 0.172, | |
"perfect_em_stderr": 0.023915513944486218 | |
}, | |
"harness|bbh:_average|3": { | |
"em": 0.33469159089614126, | |
"em_stderr": 0.028295187062505816, | |
"qem": 0.33469159089614126, | |
"qem_stderr": 0.028295187062505816, | |
"pem": 0.40632485513589595, | |
"pem_stderr": 0.03023505479168477, | |
"pqem": 0.5404894757624965, | |
"pqem_stderr": 0.03063200263404116, | |
"perfect_em": 0.33469159089614126, | |
"perfect_em_stderr": 0.028295187062505816 | |
} | |
}, | |
"versions": { | |
"harness|bbh:causal_judgment|3": 0, | |
"harness|bbh:date_understanding|3": 0, | |
"harness|bbh:disambiguation_qa|3": 0, | |
"harness|bbh:geometric_shapes|3": 0, | |
"harness|bbh:logical_deduction_five_objects|3": 0, | |
"harness|bbh:logical_deduction_seven_objects|3": 0, | |
"harness|bbh:logical_deduction_three_objects|3": 0, | |
"harness|bbh:movie_recommendation|3": 0, | |
"harness|bbh:navigate|3": 0, | |
"harness|bbh:reasoning_about_colored_objects|3": 0, | |
"harness|bbh:ruin_names|3": 0, | |
"harness|bbh:salient_translation_error_detection|3": 0, | |
"harness|bbh:snarks|3": 0, | |
"harness|bbh:sports_understanding|3": 0, | |
"harness|bbh:temporal_sequences|3": 0, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": 0 | |
}, | |
"config_tasks": { | |
"harness|bbh:causal_judgment": { | |
"name": "bbh:causal_judgment", | |
"prompt_function": "bbh_causal_judgment", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "causal_judgement", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 187, | |
"effective_num_docs": 187, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:date_understanding": { | |
"name": "bbh:date_understanding", | |
"prompt_function": "bbh_date_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "date_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:disambiguation_qa": { | |
"name": "bbh:disambiguation_qa", | |
"prompt_function": "bbh_disambiguation_qa", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "disambiguation_qa", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:geometric_shapes": { | |
"name": "bbh:geometric_shapes", | |
"prompt_function": "bbh_geometric_shapes", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "geometric_shapes", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_five_objects": { | |
"name": "bbh:logical_deduction_five_objects", | |
"prompt_function": "bbh_logical_deduction_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_seven_objects": { | |
"name": "bbh:logical_deduction_seven_objects", | |
"prompt_function": "bbh_logical_deduction_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_three_objects": { | |
"name": "bbh:logical_deduction_three_objects", | |
"prompt_function": "bbh_logical_deduction_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:movie_recommendation": { | |
"name": "bbh:movie_recommendation", | |
"prompt_function": "bbh_movie_recommendation", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "movie_recommendation", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 249, | |
"effective_num_docs": 249, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:navigate": { | |
"name": "bbh:navigate", | |
"prompt_function": "bbh_navigate", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "navigate", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:reasoning_about_colored_objects": { | |
"name": "bbh:reasoning_about_colored_objects", | |
"prompt_function": "bbh_reasoning_about_colored_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "reasoning_about_colored_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:ruin_names": { | |
"name": "bbh:ruin_names", | |
"prompt_function": "bbh_ruin_names", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "ruin_names", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 248, | |
"effective_num_docs": 248, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:salient_translation_error_detection": { | |
"name": "bbh:salient_translation_error_detection", | |
"prompt_function": "bbh_salient_translation_error_detection", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "salient_translation_error_detection", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:snarks": { | |
"name": "bbh:snarks", | |
"prompt_function": "bbh_snarks", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "snarks", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 178, | |
"effective_num_docs": 178, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:sports_understanding": { | |
"name": "bbh:sports_understanding", | |
"prompt_function": "bbh_sports_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "sports_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:temporal_sequences": { | |
"name": "bbh:temporal_sequences", | |
"prompt_function": "bbh_temporal_sequences", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "temporal_sequences", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects": { | |
"name": "bbh:tracking_shuffled_objects_five_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects": { | |
"name": "bbh:tracking_shuffled_objects_seven_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects": { | |
"name": "bbh:tracking_shuffled_objects_three_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
} | |
}, | |
"summary_tasks": { | |
"harness|bbh:causal_judgment|3": { | |
"hashes": { | |
"hash_examples": "63218f5ae055ab2b", | |
"hash_full_prompts": "148a4c45a8d2b858", | |
"hash_input_tokens": "41f3903cf7efd5be", | |
"hash_cont_tokens": "3b4e0e3575a9fe46" | |
}, | |
"truncated": 187, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 187, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"hashes": { | |
"hash_examples": "f145c7a06def3c8e", | |
"hash_full_prompts": "e79a3237877b106e", | |
"hash_input_tokens": "ae3f6744fd7add8b", | |
"hash_cont_tokens": "e8ae17ee5b188129" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"hashes": { | |
"hash_examples": "19677fd1773f7eb9", | |
"hash_full_prompts": "9458fa1926b438bb", | |
"hash_input_tokens": "a9ea8e42e7ca4f0f", | |
"hash_cont_tokens": "e3bf43a8ea23285a" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"hashes": { | |
"hash_examples": "76c7b11a13cc72a9", | |
"hash_full_prompts": "78ead1e22de562a8", | |
"hash_input_tokens": "ded49f2dd58c24f6", | |
"hash_cont_tokens": "45151dea1e2e71f3" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"hashes": { | |
"hash_examples": "0e958c856332a745", | |
"hash_full_prompts": "0d7cff0e511b49e7", | |
"hash_input_tokens": "d16bae7ef2034333", | |
"hash_cont_tokens": "c51b8361a6f4e6bd" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "ab9de25a5eb40d09", | |
"hash_full_prompts": "db7b7b19919ef4a7", | |
"hash_input_tokens": "8b1a10d2204d99c3", | |
"hash_cont_tokens": "ca75dd49f4c8d505" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"hashes": { | |
"hash_examples": "3c6bf52517714218", | |
"hash_full_prompts": "fd6a5580415c1e21", | |
"hash_input_tokens": "d48e4b55d3c0ab6c", | |
"hash_cont_tokens": "14d8d9e3f4729056" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"hashes": { | |
"hash_examples": "2d9dc4975935d31a", | |
"hash_full_prompts": "b7c9fdf1a2ad8106", | |
"hash_input_tokens": "019f6011d8ebb7b4", | |
"hash_cont_tokens": "4e32ce157b48f24a" | |
}, | |
"truncated": 249, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 249, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:navigate|3": { | |
"hashes": { | |
"hash_examples": "ba91dcdb9a064255", | |
"hash_full_prompts": "35aa68650803f91c", | |
"hash_input_tokens": "fbf7bbc927857899", | |
"hash_cont_tokens": "6a051a13fb1325c7" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"hashes": { | |
"hash_examples": "a6ba328c4c3385d2", | |
"hash_full_prompts": "954618143d9d5c6d", | |
"hash_input_tokens": "eab386685d65e6d7", | |
"hash_cont_tokens": "28c75cb68920c5e5" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"hashes": { | |
"hash_examples": "2ef28d5f2d4fdd25", | |
"hash_full_prompts": "fd807f4380c14312", | |
"hash_input_tokens": "f17c7a83f10826d5", | |
"hash_cont_tokens": "e2b9bda947d0d72f" | |
}, | |
"truncated": 248, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 248, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"hashes": { | |
"hash_examples": "c13f25ec8ffed496", | |
"hash_full_prompts": "49ab3bc1ed62613f", | |
"hash_input_tokens": "4ecdc0361737b984", | |
"hash_cont_tokens": "650e93e42a6b12c3" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:snarks|3": { | |
"hashes": { | |
"hash_examples": "5f6db7bff7f6f22e", | |
"hash_full_prompts": "fa5c1ca26f4a8d48", | |
"hash_input_tokens": "e0e9ee6065d02980", | |
"hash_cont_tokens": "c7cb581edecb36c5" | |
}, | |
"truncated": 178, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 178, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"hashes": { | |
"hash_examples": "042afbe5d9c1f02d", | |
"hash_full_prompts": "607b29401b4907ec", | |
"hash_input_tokens": "e2868a0e92e68765", | |
"hash_cont_tokens": "96ff555916e57fb2" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"hashes": { | |
"hash_examples": "803a05f352eb6afc", | |
"hash_full_prompts": "faab4b5e14b9304e", | |
"hash_input_tokens": "26a0ab6e389383c3", | |
"hash_cont_tokens": "819d7bf2bc094710" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"hashes": { | |
"hash_examples": "2bbac6db7ab0d527", | |
"hash_full_prompts": "7e689cfb3916666f", | |
"hash_input_tokens": "d0e6770bedbf925a", | |
"hash_cont_tokens": "16010037e6da1d04" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "845caf093ac2b58c", | |
"hash_full_prompts": "a80a61e259878fa0", | |
"hash_input_tokens": "9b1288a55911b79d", | |
"hash_cont_tokens": "2eb386173e7fff58" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"hashes": { | |
"hash_examples": "9004f14d5a32b9a8", | |
"hash_full_prompts": "fc66cf32f54cd46f", | |
"hash_input_tokens": "1328545243e4b107", | |
"hash_cont_tokens": "405575bc8e10ea9d" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "4ff1e3dc5703575d", | |
"hash_full_prompts": "3758756e616fd780", | |
"hash_input_tokens": "3ee0dfd31c556ab0", | |
"hash_cont_tokens": "b178ec6cb27af027" | |
}, | |
"truncated": 4362, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 4362, | |
"num_truncated_few_shots": 0 | |
} | |
} |