open-r1-eval-leaderboard/eval_results/HuggingFaceH4/qwen-1.5-1.8b-sft/v1.0/bbh/results_2024-03-20T10-20-40.831195.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 674618.279285048,
    "end_time": 674872.211203815,
    "total_evaluation_time_secondes": "253.93191876693163",
    "model_name": "HuggingFaceH4/qwen-1.5-1.8b-sft",
    "model_sha": "8a4f031a295132269fc8d07a9a0eb8f72a1f291a",
    "model_dtype": "torch.bfloat16",
    "model_size": "3.79 GB",
    "config": null
  },
"results": { | |
"harness|bbh:causal_judgment|3": { | |
"em": 0.4385026737967914, | |
"em_stderr": 0.03638341809400995, | |
"qem": 0.5508021390374331, | |
"qem_stderr": 0.0364720501817238, | |
"pem": 0.5026737967914439, | |
"pem_stderr": 0.03666125454759919, | |
"pqem": 0.5508021390374331, | |
"pqem_stderr": 0.0364720501817238, | |
"perfect_em": 0.4385026737967914, | |
"perfect_em_stderr": 0.03638341809400995 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.0, | |
"pem_stderr": 0.0, | |
"pqem": 0.344, | |
"pqem_stderr": 0.030104503392316385, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.24, | |
"pem_stderr": 0.027065293652239003, | |
"pqem": 0.328, | |
"pqem_stderr": 0.029752391824475383, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.0, | |
"pem_stderr": 0.0, | |
"pqem": 0.124, | |
"pqem_stderr": 0.02088638225867326, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"em": 0.036, | |
"em_stderr": 0.011805655169278133, | |
"qem": 0.036, | |
"qem_stderr": 0.011805655169278133, | |
"pem": 0.188, | |
"pem_stderr": 0.024760377727750495, | |
"pqem": 0.336, | |
"pqem_stderr": 0.029933259094191516, | |
"perfect_em": 0.036, | |
"perfect_em_stderr": 0.011805655169278133 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"em": 0.044, | |
"em_stderr": 0.012997373846574957, | |
"qem": 0.044, | |
"qem_stderr": 0.012997373846574957, | |
"pem": 0.136, | |
"pem_stderr": 0.021723342617052048, | |
"pqem": 0.332, | |
"pqem_stderr": 0.029844039047465912, | |
"perfect_em": 0.044, | |
"perfect_em_stderr": 0.012997373846574957 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"em": 0.076, | |
"em_stderr": 0.01679357306785964, | |
"qem": 0.076, | |
"qem_stderr": 0.01679357306785964, | |
"pem": 0.344, | |
"pem_stderr": 0.030104503392316385, | |
"pqem": 0.464, | |
"pqem_stderr": 0.03160397514522374, | |
"perfect_em": 0.076, | |
"perfect_em_stderr": 0.01679357306785964 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"em": 0.1285140562248996, | |
"em_stderr": 0.02125101454670356, | |
"qem": 0.1285140562248996, | |
"qem_stderr": 0.02125101454670356, | |
"pem": 0.4779116465863454, | |
"pem_stderr": 0.03171903523348455, | |
"pqem": 0.606425702811245, | |
"pqem_stderr": 0.031022466480549557, | |
"perfect_em": 0.1285140562248996, | |
"perfect_em_stderr": 0.02125101454670356 | |
}, | |
"harness|bbh:navigate|3": { | |
"em": 0.472, | |
"em_stderr": 0.03163648953154439, | |
"qem": 0.472, | |
"qem_stderr": 0.03163648953154439, | |
"pem": 0.472, | |
"pem_stderr": 0.03163648953154439, | |
"pqem": 0.472, | |
"pqem_stderr": 0.03163648953154439, | |
"perfect_em": 0.472, | |
"perfect_em_stderr": 0.03163648953154439 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.084, | |
"pem_stderr": 0.01757873852677635, | |
"pqem": 0.264, | |
"pqem_stderr": 0.027934518957690908, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"em": 0.020161290322580645, | |
"em_stderr": 0.008943097828248618, | |
"qem": 0.020161290322580645, | |
"qem_stderr": 0.008943097828248618, | |
"pem": 0.2661290322580645, | |
"pem_stderr": 0.028119494185147476, | |
"pqem": 0.5403225806451613, | |
"pqem_stderr": 0.031710615183950554, | |
"perfect_em": 0.020161290322580645, | |
"perfect_em_stderr": 0.008943097828248618 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"em": 0.032, | |
"em_stderr": 0.011153546809230827, | |
"qem": 0.032, | |
"qem_stderr": 0.011153546809230827, | |
"pem": 0.224, | |
"pem_stderr": 0.026421361687347905, | |
"pqem": 0.364, | |
"pqem_stderr": 0.03049155522040556, | |
"perfect_em": 0.032, | |
"perfect_em_stderr": 0.011153546809230827 | |
}, | |
"harness|bbh:snarks|3": { | |
"em": 0.23595505617977527, | |
"em_stderr": 0.031914457312533576, | |
"qem": 0.23595505617977527, | |
"qem_stderr": 0.031914457312533576, | |
"pem": 0.4606741573033708, | |
"pem_stderr": 0.0374658773638787, | |
"pqem": 0.6235955056179775, | |
"pqem_stderr": 0.036416001247281636, | |
"perfect_em": 0.23595505617977527, | |
"perfect_em_stderr": 0.031914457312533576 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"em": 0.532, | |
"em_stderr": 0.03162125257572551, | |
"qem": 0.532, | |
"qem_stderr": 0.03162125257572551, | |
"pem": 0.532, | |
"pem_stderr": 0.03162125257572551, | |
"pqem": 0.548, | |
"pqem_stderr": 0.031539864492556614, | |
"perfect_em": 0.532, | |
"perfect_em_stderr": 0.03162125257572551 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.228, | |
"pem_stderr": 0.026587432487268508, | |
"pqem": 0.452, | |
"pqem_stderr": 0.031539864492556614, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.204, | |
"pem_stderr": 0.02553712157454817, | |
"pqem": 0.36, | |
"pqem_stderr": 0.030418764025174988, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.132, | |
"pem_stderr": 0.02145098082403808, | |
"pqem": 0.244, | |
"pqem_stderr": 0.027217995464553182, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"em": 0.0, | |
"em_stderr": 0.0, | |
"qem": 0.0, | |
"qem_stderr": 0.0, | |
"pem": 0.304, | |
"pem_stderr": 0.029150213374159673, | |
"pqem": 0.396, | |
"pqem_stderr": 0.03099319785457785, | |
"perfect_em": 0.0, | |
"perfect_em_stderr": 0.0 | |
}, | |
"harness|bbh:_average|3": { | |
"em": 0.11195183758466928, | |
"em_stderr": 0.011916659932317176, | |
"qem": 0.11819069676470494, | |
"qem_stderr": 0.011921583937190165, | |
"pem": 0.2664104796077347, | |
"pem_stderr": 0.02486682051671536, | |
"pqem": 0.4082858848951009, | |
"pqem_stderr": 0.030528774105272882, | |
"perfect_em": 0.11195183758466928, | |
"perfect_em_stderr": 0.011916659932317176 | |
} | |
}, | |
"versions": { | |
"harness|bbh:causal_judgment|3": 0, | |
"harness|bbh:date_understanding|3": 0, | |
"harness|bbh:disambiguation_qa|3": 0, | |
"harness|bbh:geometric_shapes|3": 0, | |
"harness|bbh:logical_deduction_five_objects|3": 0, | |
"harness|bbh:logical_deduction_seven_objects|3": 0, | |
"harness|bbh:logical_deduction_three_objects|3": 0, | |
"harness|bbh:movie_recommendation|3": 0, | |
"harness|bbh:navigate|3": 0, | |
"harness|bbh:reasoning_about_colored_objects|3": 0, | |
"harness|bbh:ruin_names|3": 0, | |
"harness|bbh:salient_translation_error_detection|3": 0, | |
"harness|bbh:snarks|3": 0, | |
"harness|bbh:sports_understanding|3": 0, | |
"harness|bbh:temporal_sequences|3": 0, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": 0, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": 0 | |
}, | |
"config_tasks": { | |
"harness|bbh:causal_judgment": { | |
"name": "bbh:causal_judgment", | |
"prompt_function": "bbh_causal_judgment", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "causal_judgement", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 187, | |
"effective_num_docs": 187, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:date_understanding": { | |
"name": "bbh:date_understanding", | |
"prompt_function": "bbh_date_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "date_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:disambiguation_qa": { | |
"name": "bbh:disambiguation_qa", | |
"prompt_function": "bbh_disambiguation_qa", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "disambiguation_qa", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:geometric_shapes": { | |
"name": "bbh:geometric_shapes", | |
"prompt_function": "bbh_geometric_shapes", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "geometric_shapes", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_five_objects": { | |
"name": "bbh:logical_deduction_five_objects", | |
"prompt_function": "bbh_logical_deduction_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_seven_objects": { | |
"name": "bbh:logical_deduction_seven_objects", | |
"prompt_function": "bbh_logical_deduction_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:logical_deduction_three_objects": { | |
"name": "bbh:logical_deduction_three_objects", | |
"prompt_function": "bbh_logical_deduction_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "logical_deduction_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:movie_recommendation": { | |
"name": "bbh:movie_recommendation", | |
"prompt_function": "bbh_movie_recommendation", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "movie_recommendation", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 249, | |
"effective_num_docs": 249, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:navigate": { | |
"name": "bbh:navigate", | |
"prompt_function": "bbh_navigate", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "navigate", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:reasoning_about_colored_objects": { | |
"name": "bbh:reasoning_about_colored_objects", | |
"prompt_function": "bbh_reasoning_about_colored_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "reasoning_about_colored_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:ruin_names": { | |
"name": "bbh:ruin_names", | |
"prompt_function": "bbh_ruin_names", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "ruin_names", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 248, | |
"effective_num_docs": 248, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:salient_translation_error_detection": { | |
"name": "bbh:salient_translation_error_detection", | |
"prompt_function": "bbh_salient_translation_error_detection", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "salient_translation_error_detection", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:snarks": { | |
"name": "bbh:snarks", | |
"prompt_function": "bbh_snarks", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "snarks", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 178, | |
"effective_num_docs": 178, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:sports_understanding": { | |
"name": "bbh:sports_understanding", | |
"prompt_function": "bbh_sports_understanding", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "sports_understanding", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:temporal_sequences": { | |
"name": "bbh:temporal_sequences", | |
"prompt_function": "bbh_temporal_sequences", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "temporal_sequences", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects": { | |
"name": "bbh:tracking_shuffled_objects_five_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_five_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_five_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects": { | |
"name": "bbh:tracking_shuffled_objects_seven_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_seven_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_seven_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects": { | |
"name": "bbh:tracking_shuffled_objects_three_objects", | |
"prompt_function": "bbh_tracking_shuffled_objects_three_objects", | |
"hf_repo": "lukaemon/bbh", | |
"hf_subset": "tracking_shuffled_objects_three_objects", | |
"metric": [ | |
"exact_match", | |
"quasi_exact_match", | |
"prefix_exact_match", | |
"prefix_quasi_exact_match", | |
"perfect_exact_match" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": 20, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"harness" | |
], | |
"original_num_docs": 250, | |
"effective_num_docs": 250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
} | |
}, | |
"summary_tasks": { | |
"harness|bbh:causal_judgment|3": { | |
"hashes": { | |
"hash_examples": "63218f5ae055ab2b", | |
"hash_full_prompts": "7303fa1d0fe0b29a", | |
"hash_input_tokens": "cf79afd99566e177", | |
"hash_cont_tokens": "c0b4cb59b27fb376" | |
}, | |
"truncated": 187, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 187, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:date_understanding|3": { | |
"hashes": { | |
"hash_examples": "f145c7a06def3c8e", | |
"hash_full_prompts": "69e60d10afa5a6f1", | |
"hash_input_tokens": "388cef1d7f8452a8", | |
"hash_cont_tokens": "48841cb3f3361085" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:disambiguation_qa|3": { | |
"hashes": { | |
"hash_examples": "19677fd1773f7eb9", | |
"hash_full_prompts": "ae0a8fd428f9aee3", | |
"hash_input_tokens": "a494e338dc59b8fb", | |
"hash_cont_tokens": "869d56f2e64faaa7" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:geometric_shapes|3": { | |
"hashes": { | |
"hash_examples": "76c7b11a13cc72a9", | |
"hash_full_prompts": "76633257f67207f9", | |
"hash_input_tokens": "d6be67e4cb0f3f4a", | |
"hash_cont_tokens": "206b2bedea188fd0" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_five_objects|3": { | |
"hashes": { | |
"hash_examples": "0e958c856332a745", | |
"hash_full_prompts": "3c96645848786efd", | |
"hash_input_tokens": "edcff2e9d8fd8c91", | |
"hash_cont_tokens": "67dcc5dc1fbb9982" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "ab9de25a5eb40d09", | |
"hash_full_prompts": "185c5851c101ee66", | |
"hash_input_tokens": "2e5676a696b56200", | |
"hash_cont_tokens": "f351aa22dec0831a" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:logical_deduction_three_objects|3": { | |
"hashes": { | |
"hash_examples": "3c6bf52517714218", | |
"hash_full_prompts": "8ba2d94357e589d0", | |
"hash_input_tokens": "63f32ea9b18cecb4", | |
"hash_cont_tokens": "9ca2c483111905e2" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:movie_recommendation|3": { | |
"hashes": { | |
"hash_examples": "2d9dc4975935d31a", | |
"hash_full_prompts": "a411e216d0f5f626", | |
"hash_input_tokens": "e8b926724fa885a7", | |
"hash_cont_tokens": "8a1631febbda9eb6" | |
}, | |
"truncated": 249, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 249, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:navigate|3": { | |
"hashes": { | |
"hash_examples": "ba91dcdb9a064255", | |
"hash_full_prompts": "ebb3084ecc78a46a", | |
"hash_input_tokens": "b54f3b5d8ac511c3", | |
"hash_cont_tokens": "e6647d906305a15c" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:reasoning_about_colored_objects|3": { | |
"hashes": { | |
"hash_examples": "a6ba328c4c3385d2", | |
"hash_full_prompts": "38328d016a4ebef3", | |
"hash_input_tokens": "d1a319d6710bf581", | |
"hash_cont_tokens": "803636c28f9678c7" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:ruin_names|3": { | |
"hashes": { | |
"hash_examples": "2ef28d5f2d4fdd25", | |
"hash_full_prompts": "9c7d0493c37182d6", | |
"hash_input_tokens": "891c7b0ae6f3dc5e", | |
"hash_cont_tokens": "6c8aa5ad3c3f2664" | |
}, | |
"truncated": 248, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 248, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:salient_translation_error_detection|3": { | |
"hashes": { | |
"hash_examples": "c13f25ec8ffed496", | |
"hash_full_prompts": "edccd4061b168b78", | |
"hash_input_tokens": "84a150e4b9b8f3d5", | |
"hash_cont_tokens": "31da9a1f75de8198" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:snarks|3": { | |
"hashes": { | |
"hash_examples": "5f6db7bff7f6f22e", | |
"hash_full_prompts": "31cafd95ab850a44", | |
"hash_input_tokens": "dc272d8ad81f9aaa", | |
"hash_cont_tokens": "d1db836ecdeac7ac" | |
}, | |
"truncated": 178, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 178, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:sports_understanding|3": { | |
"hashes": { | |
"hash_examples": "042afbe5d9c1f02d", | |
"hash_full_prompts": "3d46581e9bbec2d0", | |
"hash_input_tokens": "026cfbc90dc57cad", | |
"hash_cont_tokens": "b37c83606195e77d" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:temporal_sequences|3": { | |
"hashes": { | |
"hash_examples": "803a05f352eb6afc", | |
"hash_full_prompts": "4a54db144a5dd222", | |
"hash_input_tokens": "5a5f2ab5fb1f5226", | |
"hash_cont_tokens": "b41ffa1f049b8181" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_five_objects|3": { | |
"hashes": { | |
"hash_examples": "2bbac6db7ab0d527", | |
"hash_full_prompts": "e3079106787cc311", | |
"hash_input_tokens": "358eebb2bcdbdd42", | |
"hash_cont_tokens": "1d97c49ee5ed9f95" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_seven_objects|3": { | |
"hashes": { | |
"hash_examples": "845caf093ac2b58c", | |
"hash_full_prompts": "6364e5b860590ec8", | |
"hash_input_tokens": "6badf0e5e3a32821", | |
"hash_cont_tokens": "23c3d6b7162951a4" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"harness|bbh:tracking_shuffled_objects_three_objects|3": { | |
"hashes": { | |
"hash_examples": "9004f14d5a32b9a8", | |
"hash_full_prompts": "01aef56c4d1fe9fe", | |
"hash_input_tokens": "eb2ff4aa41691220", | |
"hash_cont_tokens": "708fd4592b127616" | |
}, | |
"truncated": 250, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 250, | |
"effective_few_shots": 3.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "4ff1e3dc5703575d", | |
"hash_full_prompts": "1cbeab0a00117cb8", | |
"hash_input_tokens": "5548b4dbc458e592", | |
"hash_cont_tokens": "a63441544c47df2c" | |
}, | |
"truncated": 4362, | |
"non_truncated": 0, | |
"padded": 0, | |
"non_padded": 4362, | |
"num_truncated_few_shots": 0 | |
} | |
} |
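
As a quick sanity check, the "harness|bbh:_average|3" row appears to be the unweighted mean of the 18 per-task rows above (e.g. summing the "em" values and dividing by 18 gives 0.111952, matching the stored value). A minimal Python sketch that reloads this report and recomputes each average; the local filename is an assumption for illustration, not part of the eval output:

import json

# Reload the lighteval results report (filename assumed for illustration).
with open("results_2024-03-20T10-20-40.831195.json") as f:
    report = json.load(f)

results = report["results"]
# Keep only the per-task entries; skip the precomputed average row.
tasks = {k: v for k, v in results.items() if not k.endswith(":_average|3")}

for metric in ("em", "qem", "pem", "pqem", "perfect_em"):
    recomputed = sum(v[metric] for v in tasks.values()) / len(tasks)
    stored = results["harness|bbh:_average|3"][metric]
    # Expect agreement to floating-point precision, e.g. em ~= 0.111952.
    print(f"{metric}: recomputed={recomputed:.6f} stored={stored:.6f}")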