open-r1-eval-leaderboard/eval_results/Qwen/Qwen1.5-0.5B-Chat/main/bbh/results_2024-03-18T19-43-00.075213.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1047853.482597071,
    "end_time": 1048029.018555961,
    "total_evaluation_time_secondes": "175.53595888998825",
    "model_name": "Qwen/Qwen1.5-0.5B-Chat",
    "model_sha": "6c705984bb8b5591dd4e1a9e66e1a127965fd08d",
    "model_dtype": "torch.bfloat16",
    "model_size": "1.05 GB",
    "config": null
  },
  "results": {
    "harness|bbh:causal_judgment|3": {
      "em": 0.45454545454545453,
      "em_stderr": 0.036509969495568145,
      "qem": 0.45454545454545453,
      "qem_stderr": 0.036509969495568145,
      "pem": 0.45454545454545453,
      "pem_stderr": 0.036509969495568145,
      "pqem": 0.45454545454545453,
      "pqem_stderr": 0.036509969495568145,
      "perfect_em": 0.45454545454545453,
      "perfect_em_stderr": 0.036509969495568145
    },
    "harness|bbh:date_understanding|3": {
      "em": 0.116,
      "em_stderr": 0.020293429803083837,
      "qem": 0.116,
      "qem_stderr": 0.020293429803083837,
      "pem": 0.164,
      "pem_stderr": 0.023465261002076757,
      "pqem": 0.356,
      "pqem_stderr": 0.030343680657153215,
      "perfect_em": 0.116,
      "perfect_em_stderr": 0.020293429803083837
    },
    "harness|bbh:disambiguation_qa|3": {
      "em": 0.388,
      "em_stderr": 0.030881038748993922,
      "qem": 0.388,
      "qem_stderr": 0.030881038748993922,
      "pem": 0.388,
      "pem_stderr": 0.030881038748993922,
      "pqem": 0.7,
      "pqem_stderr": 0.02904089347757585,
      "perfect_em": 0.388,
      "perfect_em_stderr": 0.030881038748993922
    },
    "harness|bbh:geometric_shapes|3": {
      "em": 0.176,
      "em_stderr": 0.024133497525457116,
      "qem": 0.176,
      "qem_stderr": 0.024133497525457116,
      "pem": 0.176,
      "pem_stderr": 0.024133497525457116,
      "pqem": 0.176,
      "pqem_stderr": 0.024133497525457116,
      "perfect_em": 0.176,
      "perfect_em_stderr": 0.024133497525457116
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "em": 0.176,
      "em_stderr": 0.024133497525457116,
      "qem": 0.176,
      "qem_stderr": 0.024133497525457116,
      "pem": 0.176,
      "pem_stderr": 0.024133497525457116,
      "pqem": 0.368,
      "pqem_stderr": 0.030562070620993163,
      "perfect_em": 0.176,
      "perfect_em_stderr": 0.024133497525457116
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "em": 0.132,
      "em_stderr": 0.021450980824038107,
      "qem": 0.132,
      "qem_stderr": 0.021450980824038107,
      "pem": 0.132,
      "pem_stderr": 0.021450980824038107,
      "pqem": 0.28,
      "pqem_stderr": 0.028454148277832332,
      "perfect_em": 0.132,
      "perfect_em_stderr": 0.021450980824038107
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "em": 0.344,
      "em_stderr": 0.03010450339231639,
      "qem": 0.344,
      "qem_stderr": 0.03010450339231639,
      "pem": 0.344,
      "pem_stderr": 0.03010450339231639,
      "pqem": 0.656,
      "pqem_stderr": 0.03010450339231639,
      "perfect_em": 0.344,
      "perfect_em_stderr": 0.03010450339231639
    },
    "harness|bbh:movie_recommendation|3": {
      "em": 0.30522088353413657,
      "em_stderr": 0.029241837556221117,
      "qem": 0.30522088353413657,
      "qem_stderr": 0.029241837556221117,
      "pem": 0.3654618473895582,
      "pem_stderr": 0.030579054441446426,
      "pqem": 0.5903614457831325,
      "pqem_stderr": 0.03122723745928603,
      "perfect_em": 0.30522088353413657,
      "perfect_em_stderr": 0.029241837556221117
    },
    "harness|bbh:navigate|3": {
      "em": 0.424,
      "em_stderr": 0.03131803437491615,
      "qem": 0.424,
      "qem_stderr": 0.03131803437491615,
      "pem": 0.424,
      "pem_stderr": 0.03131803437491615,
      "pqem": 0.424,
      "pqem_stderr": 0.03131803437491615,
      "perfect_em": 0.424,
      "perfect_em_stderr": 0.03131803437491615
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "em": 0.148,
      "em_stderr": 0.022503547243806144,
      "qem": 0.148,
      "qem_stderr": 0.022503547243806144,
      "pem": 0.148,
      "pem_stderr": 0.022503547243806144,
      "pqem": 0.32,
      "pqem_stderr": 0.02956172495524104,
      "perfect_em": 0.148,
      "perfect_em_stderr": 0.022503547243806144
    },
    "harness|bbh:ruin_names|3": {
      "em": 0.27419354838709675,
      "em_stderr": 0.028385108031064067,
      "qem": 0.27419354838709675,
      "qem_stderr": 0.028385108031064067,
      "pem": 0.2782258064516129,
      "pem_stderr": 0.028513524532740565,
      "pqem": 0.5645161290322581,
      "pqem_stderr": 0.031548283738756754,
      "perfect_em": 0.27419354838709675,
      "perfect_em_stderr": 0.028385108031064067
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "em": 0.224,
      "em_stderr": 0.026421361687347905,
      "qem": 0.224,
      "qem_stderr": 0.026421361687347905,
      "pem": 0.224,
      "pem_stderr": 0.026421361687347905,
      "pqem": 0.364,
      "pqem_stderr": 0.03049155522040556,
      "perfect_em": 0.224,
      "perfect_em_stderr": 0.026421361687347905
    },
    "harness|bbh:snarks|3": {
      "em": 0.48314606741573035,
      "em_stderr": 0.037560944447344806,
      "qem": 0.48314606741573035,
      "qem_stderr": 0.037560944447344806,
      "pem": 0.48314606741573035,
      "pem_stderr": 0.037560944447344806,
      "pqem": 0.702247191011236,
      "pqem_stderr": 0.03437053607199152,
      "perfect_em": 0.48314606741573035,
      "perfect_em_stderr": 0.037560944447344806
    },
    "harness|bbh:sports_understanding|3": {
      "em": 0.468,
      "em_stderr": 0.03162125257572551,
      "qem": 0.468,
      "qem_stderr": 0.03162125257572551,
      "pem": 0.468,
      "pem_stderr": 0.03162125257572551,
      "pqem": 0.468,
      "pqem_stderr": 0.03162125257572551,
      "perfect_em": 0.468,
      "perfect_em_stderr": 0.03162125257572551
    },
    "harness|bbh:temporal_sequences|3": {
      "em": 0.264,
      "em_stderr": 0.02793451895769091,
      "qem": 0.264,
      "qem_stderr": 0.02793451895769091,
      "pem": 0.264,
      "pem_stderr": 0.02793451895769091,
      "pqem": 0.548,
      "pqem_stderr": 0.03153986449255663,
      "perfect_em": 0.264,
      "perfect_em_stderr": 0.02793451895769091
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "em": 0.196,
      "em_stderr": 0.02515685731325594,
      "qem": 0.196,
      "qem_stderr": 0.02515685731325594,
      "pem": 0.216,
      "pem_stderr": 0.02607865766373274,
      "pqem": 0.416,
      "pqem_stderr": 0.031235856237014577,
      "perfect_em": 0.196,
      "perfect_em_stderr": 0.02515685731325594
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "em": 0.136,
      "em_stderr": 0.021723342617052062,
      "qem": 0.136,
      "qem_stderr": 0.021723342617052062,
      "pem": 0.144,
      "pem_stderr": 0.022249407735450217,
      "pqem": 0.288,
      "pqem_stderr": 0.028697004587398205,
      "perfect_em": 0.136,
      "perfect_em_stderr": 0.021723342617052062
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "em": 0.332,
      "em_stderr": 0.02984403904746589,
      "qem": 0.332,
      "qem_stderr": 0.02984403904746589,
      "pem": 0.352,
      "pem_stderr": 0.030266288057359928,
      "pqem": 0.668,
      "pqem_stderr": 0.029844039047465912,
      "perfect_em": 0.332,
      "perfect_em_stderr": 0.02984403904746589
    },
    "harness|bbh:_average|3": {
      "em": 0.2800614418823566,
      "em_stderr": 0.02773432006482251,
      "qem": 0.2800614418823566,
      "qem_stderr": 0.02773432006482251,
      "pem": 0.28896550976679763,
      "pem_stderr": 0.028095852235081605,
      "pqem": 0.46353723446511563,
      "pqem_stderr": 0.03058911956709189,
      "perfect_em": 0.2800614418823566,
      "perfect_em_stderr": 0.02773432006482251
    }
  },
  "versions": {
    "harness|bbh:causal_judgment|3": 0,
    "harness|bbh:date_understanding|3": 0,
    "harness|bbh:disambiguation_qa|3": 0,
    "harness|bbh:geometric_shapes|3": 0,
    "harness|bbh:logical_deduction_five_objects|3": 0,
    "harness|bbh:logical_deduction_seven_objects|3": 0,
    "harness|bbh:logical_deduction_three_objects|3": 0,
    "harness|bbh:movie_recommendation|3": 0,
    "harness|bbh:navigate|3": 0,
    "harness|bbh:reasoning_about_colored_objects|3": 0,
    "harness|bbh:ruin_names|3": 0,
    "harness|bbh:salient_translation_error_detection|3": 0,
    "harness|bbh:snarks|3": 0,
    "harness|bbh:sports_understanding|3": 0,
    "harness|bbh:temporal_sequences|3": 0,
    "harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_three_objects|3": 0
  },
  "config_tasks": {
    "harness|bbh:causal_judgment": {
      "name": "bbh:causal_judgment",
      "prompt_function": "bbh_causal_judgment",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "causal_judgement",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 187,
      "effective_num_docs": 187,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:date_understanding": {
      "name": "bbh:date_understanding",
      "prompt_function": "bbh_date_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "date_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:disambiguation_qa": {
      "name": "bbh:disambiguation_qa",
      "prompt_function": "bbh_disambiguation_qa",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "disambiguation_qa",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:geometric_shapes": {
      "name": "bbh:geometric_shapes",
      "prompt_function": "bbh_geometric_shapes",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "geometric_shapes",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_five_objects": {
      "name": "bbh:logical_deduction_five_objects",
      "prompt_function": "bbh_logical_deduction_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_seven_objects": {
      "name": "bbh:logical_deduction_seven_objects",
      "prompt_function": "bbh_logical_deduction_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_three_objects": {
      "name": "bbh:logical_deduction_three_objects",
      "prompt_function": "bbh_logical_deduction_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:movie_recommendation": {
      "name": "bbh:movie_recommendation",
      "prompt_function": "bbh_movie_recommendation",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "movie_recommendation",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 249,
      "effective_num_docs": 249,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:navigate": {
      "name": "bbh:navigate",
      "prompt_function": "bbh_navigate",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "navigate",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:reasoning_about_colored_objects": {
      "name": "bbh:reasoning_about_colored_objects",
      "prompt_function": "bbh_reasoning_about_colored_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "reasoning_about_colored_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:ruin_names": {
      "name": "bbh:ruin_names",
      "prompt_function": "bbh_ruin_names",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "ruin_names",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 248,
      "effective_num_docs": 248,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:salient_translation_error_detection": {
      "name": "bbh:salient_translation_error_detection",
      "prompt_function": "bbh_salient_translation_error_detection",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "salient_translation_error_detection",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:snarks": {
      "name": "bbh:snarks",
      "prompt_function": "bbh_snarks",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "snarks",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 178,
      "effective_num_docs": 178,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:sports_understanding": {
      "name": "bbh:sports_understanding",
      "prompt_function": "bbh_sports_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "sports_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:temporal_sequences": {
      "name": "bbh:temporal_sequences",
      "prompt_function": "bbh_temporal_sequences",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "temporal_sequences",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_five_objects": {
      "name": "bbh:tracking_shuffled_objects_five_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects": {
      "name": "bbh:tracking_shuffled_objects_seven_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_three_objects": {
      "name": "bbh:tracking_shuffled_objects_three_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "harness|bbh:causal_judgment|3": {
      "hashes": {
        "hash_examples": "63218f5ae055ab2b",
        "hash_full_prompts": "148a4c45a8d2b858",
        "hash_input_tokens": "41f3903cf7efd5be",
        "hash_cont_tokens": "2b1c252616552565"
      },
      "truncated": 187,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 187,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:date_understanding|3": {
      "hashes": {
        "hash_examples": "f145c7a06def3c8e",
        "hash_full_prompts": "e79a3237877b106e",
        "hash_input_tokens": "ae3f6744fd7add8b",
        "hash_cont_tokens": "6bc80b0ca66464e7"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:disambiguation_qa|3": {
      "hashes": {
        "hash_examples": "19677fd1773f7eb9",
        "hash_full_prompts": "9458fa1926b438bb",
        "hash_input_tokens": "a9ea8e42e7ca4f0f",
        "hash_cont_tokens": "eb6214a3fd3cb167"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:geometric_shapes|3": {
      "hashes": {
        "hash_examples": "76c7b11a13cc72a9",
        "hash_full_prompts": "78ead1e22de562a8",
        "hash_input_tokens": "ded49f2dd58c24f6",
        "hash_cont_tokens": "31edbe902bbed74b"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "hashes": {
        "hash_examples": "0e958c856332a745",
        "hash_full_prompts": "0d7cff0e511b49e7",
        "hash_input_tokens": "d16bae7ef2034333",
        "hash_cont_tokens": "958ce48a7b35c33a"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "hashes": {
        "hash_examples": "ab9de25a5eb40d09",
        "hash_full_prompts": "db7b7b19919ef4a7",
        "hash_input_tokens": "8b1a10d2204d99c3",
        "hash_cont_tokens": "b8a3547024ba00da"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "hashes": {
        "hash_examples": "3c6bf52517714218",
        "hash_full_prompts": "fd6a5580415c1e21",
        "hash_input_tokens": "d48e4b55d3c0ab6c",
        "hash_cont_tokens": "8b76b4d4ed2635c1"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:movie_recommendation|3": {
      "hashes": {
        "hash_examples": "2d9dc4975935d31a",
        "hash_full_prompts": "b7c9fdf1a2ad8106",
        "hash_input_tokens": "019f6011d8ebb7b4",
        "hash_cont_tokens": "0ce9ab06e6d45f53"
      },
      "truncated": 249,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 249,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:navigate|3": {
      "hashes": {
        "hash_examples": "ba91dcdb9a064255",
        "hash_full_prompts": "35aa68650803f91c",
        "hash_input_tokens": "fbf7bbc927857899",
        "hash_cont_tokens": "4c1c1d085fa4b5e8"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "hashes": {
        "hash_examples": "a6ba328c4c3385d2",
        "hash_full_prompts": "954618143d9d5c6d",
        "hash_input_tokens": "eab386685d65e6d7",
        "hash_cont_tokens": "bd0e5dbe9dc6b564"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:ruin_names|3": {
      "hashes": {
        "hash_examples": "2ef28d5f2d4fdd25",
        "hash_full_prompts": "fd807f4380c14312",
        "hash_input_tokens": "f17c7a83f10826d5",
        "hash_cont_tokens": "41c70c002212ae1d"
      },
      "truncated": 248,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 248,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "hashes": {
        "hash_examples": "c13f25ec8ffed496",
        "hash_full_prompts": "49ab3bc1ed62613f",
        "hash_input_tokens": "4ecdc0361737b984",
        "hash_cont_tokens": "6f5bea4e5dafeb02"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:snarks|3": {
      "hashes": {
        "hash_examples": "5f6db7bff7f6f22e",
        "hash_full_prompts": "fa5c1ca26f4a8d48",
        "hash_input_tokens": "e0e9ee6065d02980",
        "hash_cont_tokens": "2ad77d2d56c85ba8"
      },
      "truncated": 178,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 178,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:sports_understanding|3": {
      "hashes": {
        "hash_examples": "042afbe5d9c1f02d",
        "hash_full_prompts": "607b29401b4907ec",
        "hash_input_tokens": "e2868a0e92e68765",
        "hash_cont_tokens": "47b63407c2dc4c1e"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:temporal_sequences|3": {
      "hashes": {
        "hash_examples": "803a05f352eb6afc",
        "hash_full_prompts": "faab4b5e14b9304e",
        "hash_input_tokens": "26a0ab6e389383c3",
        "hash_cont_tokens": "ec9978fbd9924f4c"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "hashes": {
        "hash_examples": "2bbac6db7ab0d527",
        "hash_full_prompts": "7e689cfb3916666f",
        "hash_input_tokens": "d0e6770bedbf925a",
        "hash_cont_tokens": "ac5fa8cffb972c8e"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "hashes": {
        "hash_examples": "845caf093ac2b58c",
        "hash_full_prompts": "a80a61e259878fa0",
        "hash_input_tokens": "9b1288a55911b79d",
        "hash_cont_tokens": "8736eeb6db7cd358"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "hashes": {
        "hash_examples": "9004f14d5a32b9a8",
        "hash_full_prompts": "fc66cf32f54cd46f",
        "hash_input_tokens": "1328545243e4b107",
        "hash_cont_tokens": "81f84fcc367a02ea"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "4ff1e3dc5703575d",
      "hash_full_prompts": "3758756e616fd780",
      "hash_input_tokens": "3ee0dfd31c556ab0",
      "hash_cont_tokens": "a6844dce5b8e19e8"
    },
    "truncated": 4362,
    "non_truncated": 0,
    "padded": 0,
    "non_padded": 4362,
    "num_truncated_few_shots": 0
  }
}
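
For reference, a minimal sketch of how a results file like this can be consumed programmatically, assuming it has been downloaded locally as results.json (the filename is an assumption for illustration; the keys used below all appear in the file above):

import json

# Load the lighteval results file (local path is an assumption).
with open("results.json") as f:
    data = json.load(f)

# Per-task scores live under "results", keyed "harness|bbh:<task>|<n_fewshot>".
# The suite-level mean is precomputed under "harness|bbh:_average|3".
average = data["results"]["harness|bbh:_average|3"]

print(f"Model: {data['config_general']['model_name']}")
print(f"BBH average em:   {average['em']:.4f} +/- {average['em_stderr']:.4f}")
print(f"BBH average pqem: {average['pqem']:.4f} +/- {average['pqem_stderr']:.4f}")

# Sanity check: recompute the unweighted mean over the 18 individual tasks.
task_scores = [v["em"] for k, v in data["results"].items() if "_average" not in k]
print(f"Recomputed mean em: {sum(task_scores) / len(task_scores):.4f}")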