open-r1-eval-leaderboard/eval_results/databricks/dbrx-instruct/main/bbh/results_2024-03-27T20-03-59.053413.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1148305.096954957,
    "end_time": 1151521.442601499,
    "total_evaluation_time_secondes": "3216.3456465417985",
    "model_name": "databricks/dbrx-instruct",
    "model_sha": "464e701f50aef4c1b59c81fb5667819a5d08e108",
    "model_dtype": "torch.bfloat16",
    "model_size": "245.12 GB",
    "config": null
  },
  "results": {
    "harness|bbh:causal_judgment|3": {
      "em": 0.23529411764705882,
      "em_stderr": 0.03110257015343266,
      "qem": 0.36363636363636365,
      "qem_stderr": 0.03527198153014412,
      "pem": 0.6256684491978609,
      "pem_stderr": 0.03548492341343032,
      "pqem": 0.6256684491978609,
      "pqem_stderr": 0.03548492341343032,
      "perfect_em": 0.22994652406417113,
      "perfect_em_stderr": 0.03085442058348997
    },
    "harness|bbh:date_understanding|3": {
      "em": 0.428,
      "em_stderr": 0.031355968923772626,
      "qem": 0.428,
      "qem_stderr": 0.031355968923772626,
      "pem": 0.524,
      "pem_stderr": 0.03164968895968781,
      "pqem": 0.588,
      "pqem_stderr": 0.031191596026022894,
      "perfect_em": 0.428,
      "perfect_em_stderr": 0.031355968923772626
    },
    "harness|bbh:disambiguation_qa|3": {
      "em": 0.732,
      "em_stderr": 0.028068762382526688,
      "qem": 0.732,
      "qem_stderr": 0.028068762382526688,
      "pem": 0.74,
      "pem_stderr": 0.027797315752644308,
      "pqem": 0.772,
      "pqem_stderr": 0.02658743248726848,
      "perfect_em": 0.732,
      "perfect_em_stderr": 0.028068762382526688
    },
    "harness|bbh:geometric_shapes|3": {
      "em": 0.16,
      "em_stderr": 0.023232714782060654,
      "qem": 0.16,
      "qem_stderr": 0.023232714782060654,
      "pem": 0.16,
      "pem_stderr": 0.023232714782060654,
      "pqem": 0.16,
      "pqem_stderr": 0.023232714782060654,
      "perfect_em": 0.16,
      "perfect_em_stderr": 0.023232714782060654
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "em": 0.172,
      "em_stderr": 0.023915513944486218,
      "qem": 0.172,
      "qem_stderr": 0.023915513944486218,
      "pem": 0.416,
      "pem_stderr": 0.031235856237014574,
      "pqem": 0.512,
      "pqem_stderr": 0.03167708558254709,
      "perfect_em": 0.172,
      "perfect_em_stderr": 0.023915513944486218
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "em": 0.304,
      "em_stderr": 0.029150213374159673,
      "qem": 0.304,
      "qem_stderr": 0.029150213374159673,
      "pem": 0.404,
      "pem_stderr": 0.031096688184825295,
      "pqem": 0.48,
      "pqem_stderr": 0.031660853408495185,
      "perfect_em": 0.304,
      "perfect_em_stderr": 0.029150213374159673
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "em": 0.444,
      "em_stderr": 0.03148684942554574,
      "qem": 0.444,
      "qem_stderr": 0.03148684942554574,
      "pem": 0.704,
      "pem_stderr": 0.028928939388379635,
      "pqem": 0.816,
      "pqem_stderr": 0.02455581299422256,
      "perfect_em": 0.444,
      "perfect_em_stderr": 0.03148684942554574
    },
    "harness|bbh:movie_recommendation|3": {
      "em": 0.8353413654618473,
      "em_stderr": 0.02355040652773773,
      "qem": 0.8353413654618473,
      "qem_stderr": 0.02355040652773773,
      "pem": 0.9196787148594378,
      "pem_stderr": 0.01725867901043425,
      "pqem": 0.9357429718875502,
      "pqem_stderr": 0.015570865895003505,
      "perfect_em": 0.8353413654618473,
      "perfect_em_stderr": 0.02355040652773773
    },
    "harness|bbh:navigate|3": {
      "em": 0.004,
      "em_stderr": 0.004000000000000008,
      "qem": 0.004,
      "qem_stderr": 0.004000000000000008,
      "pem": 0.74,
      "pem_stderr": 0.027797315752644304,
      "pqem": 0.74,
      "pqem_stderr": 0.027797315752644304,
      "perfect_em": 0.004,
      "perfect_em_stderr": 0.004000000000000008
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "em": 0.416,
      "em_stderr": 0.031235856237014577,
      "qem": 0.416,
      "qem_stderr": 0.031235856237014577,
      "pem": 0.536,
      "pem_stderr": 0.03160397514522374,
      "pqem": 0.592,
      "pqem_stderr": 0.0311452098465485,
      "perfect_em": 0.416,
      "perfect_em_stderr": 0.031235856237014577
    },
    "harness|bbh:ruin_names|3": {
      "em": 0.7459677419354839,
      "em_stderr": 0.027698482830691398,
      "qem": 0.7459677419354839,
      "qem_stderr": 0.027698482830691398,
      "pem": 0.7661290322580645,
      "pem_stderr": 0.02693336643435783,
      "pqem": 0.8185483870967742,
      "pqem_stderr": 0.024521900075481863,
      "perfect_em": 0.7459677419354839,
      "perfect_em_stderr": 0.027698482830691398
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "em": 0.528,
      "em_stderr": 0.031636489531544396,
      "qem": 0.528,
      "qem_stderr": 0.031636489531544396,
      "pem": 0.528,
      "pem_stderr": 0.031636489531544396,
      "pqem": 0.636,
      "pqem_stderr": 0.030491555220405562,
      "perfect_em": 0.528,
      "perfect_em_stderr": 0.031636489531544396
    },
    "harness|bbh:snarks|3": {
      "em": 0.5224719101123596,
      "em_stderr": 0.03754432508487194,
      "qem": 0.5224719101123596,
      "qem_stderr": 0.03754432508487194,
      "pem": 0.5393258426966292,
      "pem_stderr": 0.03746587736387869,
      "pqem": 0.6123595505617978,
      "pqem_stderr": 0.03662108091274476,
      "perfect_em": 0.5224719101123596,
      "perfect_em_stderr": 0.03754432508487194
    },
    "harness|bbh:sports_understanding|3": {
      "em": 0.776,
      "em_stderr": 0.0264213616873479,
      "qem": 0.776,
      "qem_stderr": 0.0264213616873479,
      "pem": 0.828,
      "pem_stderr": 0.023915513944486218,
      "pqem": 0.828,
      "pqem_stderr": 0.023915513944486218,
      "perfect_em": 0.0,
      "perfect_em_stderr": 0.0
    },
    "harness|bbh:temporal_sequences|3": {
      "em": 0.216,
      "em_stderr": 0.02607865766373272,
      "qem": 0.216,
      "qem_stderr": 0.02607865766373272,
      "pem": 0.524,
      "pem_stderr": 0.03164968895968781,
      "pqem": 0.624,
      "pqem_stderr": 0.030696336267394587,
      "perfect_em": 0.216,
      "perfect_em_stderr": 0.02607865766373272
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "em": 0.12,
      "em_stderr": 0.02059360059683996,
      "qem": 0.12,
      "qem_stderr": 0.02059360059683996,
      "pem": 0.176,
      "pem_stderr": 0.024133497525457123,
      "pqem": 0.344,
      "pqem_stderr": 0.03010450339231639,
      "perfect_em": 0.12,
      "perfect_em_stderr": 0.02059360059683996
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "em": 0.124,
      "em_stderr": 0.02088638225867326,
      "qem": 0.124,
      "qem_stderr": 0.02088638225867326,
      "pem": 0.128,
      "pem_stderr": 0.021172081336336492,
      "pqem": 0.264,
      "pqem_stderr": 0.027934518957690908,
      "perfect_em": 0.124,
      "perfect_em_stderr": 0.02088638225867326
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "em": 0.252,
      "em_stderr": 0.02751385193303136,
      "qem": 0.252,
      "qem_stderr": 0.02751385193303136,
      "pem": 0.304,
      "pem_stderr": 0.029150213374159673,
      "pqem": 0.572,
      "pqem_stderr": 0.03135596892377261,
      "perfect_em": 0.252,
      "perfect_em_stderr": 0.02751385193303136
    },
    "harness|bbh:_average|3": {
      "em": 0.3897263963975972,
      "em_stderr": 0.026415111518748306,
      "qem": 0.3968565211747808,
      "qem_stderr": 0.026646745484121166,
      "pem": 0.5312667799451106,
      "pem_stderr": 0.02845237917201406,
      "pqem": 0.6066844088191101,
      "pqem_stderr": 0.02858584377125202,
      "perfect_em": 0.3463181967541034,
      "perfect_em_stderr": 0.024933472004454386
    }
  },
  "versions": {
    "harness|bbh:causal_judgment|3": 0,
    "harness|bbh:date_understanding|3": 0,
    "harness|bbh:disambiguation_qa|3": 0,
    "harness|bbh:geometric_shapes|3": 0,
    "harness|bbh:logical_deduction_five_objects|3": 0,
    "harness|bbh:logical_deduction_seven_objects|3": 0,
    "harness|bbh:logical_deduction_three_objects|3": 0,
    "harness|bbh:movie_recommendation|3": 0,
    "harness|bbh:navigate|3": 0,
    "harness|bbh:reasoning_about_colored_objects|3": 0,
    "harness|bbh:ruin_names|3": 0,
    "harness|bbh:salient_translation_error_detection|3": 0,
    "harness|bbh:snarks|3": 0,
    "harness|bbh:sports_understanding|3": 0,
    "harness|bbh:temporal_sequences|3": 0,
    "harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_three_objects|3": 0
  },
  "config_tasks": {
    "harness|bbh:causal_judgment": {
      "name": "bbh:causal_judgment",
      "prompt_function": "bbh_causal_judgment",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "causal_judgement",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 187,
      "effective_num_docs": 187,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:date_understanding": {
      "name": "bbh:date_understanding",
      "prompt_function": "bbh_date_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "date_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:disambiguation_qa": {
      "name": "bbh:disambiguation_qa",
      "prompt_function": "bbh_disambiguation_qa",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "disambiguation_qa",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:geometric_shapes": {
      "name": "bbh:geometric_shapes",
      "prompt_function": "bbh_geometric_shapes",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "geometric_shapes",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_five_objects": {
      "name": "bbh:logical_deduction_five_objects",
      "prompt_function": "bbh_logical_deduction_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_seven_objects": {
      "name": "bbh:logical_deduction_seven_objects",
      "prompt_function": "bbh_logical_deduction_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_three_objects": {
      "name": "bbh:logical_deduction_three_objects",
      "prompt_function": "bbh_logical_deduction_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:movie_recommendation": {
      "name": "bbh:movie_recommendation",
      "prompt_function": "bbh_movie_recommendation",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "movie_recommendation",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 249,
      "effective_num_docs": 249,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:navigate": {
      "name": "bbh:navigate",
      "prompt_function": "bbh_navigate",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "navigate",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:reasoning_about_colored_objects": {
      "name": "bbh:reasoning_about_colored_objects",
      "prompt_function": "bbh_reasoning_about_colored_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "reasoning_about_colored_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:ruin_names": {
      "name": "bbh:ruin_names",
      "prompt_function": "bbh_ruin_names",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "ruin_names",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 248,
      "effective_num_docs": 248,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:salient_translation_error_detection": {
      "name": "bbh:salient_translation_error_detection",
      "prompt_function": "bbh_salient_translation_error_detection",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "salient_translation_error_detection",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:snarks": {
      "name": "bbh:snarks",
      "prompt_function": "bbh_snarks",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "snarks",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 178,
      "effective_num_docs": 178,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:sports_understanding": {
      "name": "bbh:sports_understanding",
      "prompt_function": "bbh_sports_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "sports_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:temporal_sequences": {
      "name": "bbh:temporal_sequences",
      "prompt_function": "bbh_temporal_sequences",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "temporal_sequences",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_five_objects": {
      "name": "bbh:tracking_shuffled_objects_five_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects": {
      "name": "bbh:tracking_shuffled_objects_seven_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_three_objects": {
      "name": "bbh:tracking_shuffled_objects_three_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "harness|bbh:causal_judgment|3": {
      "hashes": {
        "hash_examples": "63218f5ae055ab2b",
        "hash_full_prompts": "097ea69c97d8d63c",
        "hash_input_tokens": "3bfacaabb95d349c",
        "hash_cont_tokens": "cec3b3e7e6e6e652"
      },
      "truncated": 187,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 187,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:date_understanding|3": {
      "hashes": {
        "hash_examples": "f145c7a06def3c8e",
        "hash_full_prompts": "4656fcc6bf6c088a",
        "hash_input_tokens": "32c8c412a52e18d1",
        "hash_cont_tokens": "1f86353fc71edb4f"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:disambiguation_qa|3": {
      "hashes": {
        "hash_examples": "19677fd1773f7eb9",
        "hash_full_prompts": "9461b8566f753ce0",
        "hash_input_tokens": "6f5f4a004aeb1b8e",
        "hash_cont_tokens": "dfe6b802c4e8f17d"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:geometric_shapes|3": {
      "hashes": {
        "hash_examples": "76c7b11a13cc72a9",
        "hash_full_prompts": "f215e166e623ef84",
        "hash_input_tokens": "47b4e8527bb3e802",
        "hash_cont_tokens": "b2f541fc2c616dfc"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "hashes": {
        "hash_examples": "0e958c856332a745",
        "hash_full_prompts": "04672b6603f3d9a8",
        "hash_input_tokens": "547935155694da62",
        "hash_cont_tokens": "d5e72bb8a42fc663"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "hashes": {
        "hash_examples": "ab9de25a5eb40d09",
        "hash_full_prompts": "13c33710cf8c618b",
        "hash_input_tokens": "6e0cb450313c1aef",
        "hash_cont_tokens": "8c208db35870aa4e"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "hashes": {
        "hash_examples": "3c6bf52517714218",
        "hash_full_prompts": "09850c0494e10c16",
        "hash_input_tokens": "be59138a48dcd704",
        "hash_cont_tokens": "39caced9428df5de"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:movie_recommendation|3": {
      "hashes": {
        "hash_examples": "2d9dc4975935d31a",
        "hash_full_prompts": "a72abaa067b5c9a1",
        "hash_input_tokens": "b4441965edc623af",
        "hash_cont_tokens": "365ec7ecb8a92c9f"
      },
      "truncated": 249,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 249,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:navigate|3": {
      "hashes": {
        "hash_examples": "ba91dcdb9a064255",
        "hash_full_prompts": "ff834a37846d4a03",
        "hash_input_tokens": "59637da21afcfeb3",
        "hash_cont_tokens": "b2e053d73c3335b4"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "hashes": {
        "hash_examples": "a6ba328c4c3385d2",
        "hash_full_prompts": "385be44b23e6af78",
        "hash_input_tokens": "060d4491bf377e6b",
        "hash_cont_tokens": "01279134cbcba1d6"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:ruin_names|3": {
      "hashes": {
        "hash_examples": "2ef28d5f2d4fdd25",
        "hash_full_prompts": "bd1da410faf1f720",
        "hash_input_tokens": "6520eacde64b9667",
        "hash_cont_tokens": "092d1db0c104f0f5"
      },
      "truncated": 248,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 248,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "hashes": {
        "hash_examples": "c13f25ec8ffed496",
        "hash_full_prompts": "f448bbcf0972445e",
        "hash_input_tokens": "c96ed8fb6d2aca49",
        "hash_cont_tokens": "b042b65816db3645"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:snarks|3": {
      "hashes": {
        "hash_examples": "5f6db7bff7f6f22e",
        "hash_full_prompts": "20cdeb5f4b9a9949",
        "hash_input_tokens": "de17a274d93bbe3b",
        "hash_cont_tokens": "cb1f09b1af04f980"
      },
      "truncated": 178,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 178,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:sports_understanding|3": {
      "hashes": {
        "hash_examples": "042afbe5d9c1f02d",
        "hash_full_prompts": "f34bf1748d3506ea",
        "hash_input_tokens": "29686689437e3dab",
        "hash_cont_tokens": "ce07939b76318f7f"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:temporal_sequences|3": {
      "hashes": {
        "hash_examples": "803a05f352eb6afc",
        "hash_full_prompts": "48241700c5214f8a",
        "hash_input_tokens": "a6622a8c410a042e",
        "hash_cont_tokens": "e67cde9b74581d9a"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "hashes": {
        "hash_examples": "2bbac6db7ab0d527",
        "hash_full_prompts": "76f44a206b692434",
        "hash_input_tokens": "39f2012b855cd277",
        "hash_cont_tokens": "4953e9d43feffaf8"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "hashes": {
        "hash_examples": "845caf093ac2b58c",
        "hash_full_prompts": "c75b891fd5861ef5",
        "hash_input_tokens": "c96ed8fb6d2aca49",
        "hash_cont_tokens": "18010672c00da0a1"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "hashes": {
        "hash_examples": "9004f14d5a32b9a8",
        "hash_full_prompts": "f80b4803a5117e21",
        "hash_input_tokens": "7079a7c11e39f5fb",
        "hash_cont_tokens": "8bc932e0586f5943"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "4ff1e3dc5703575d",
      "hash_full_prompts": "efd066c002f8e0a5",
      "hash_input_tokens": "d0f715eb2848ef32",
      "hash_cont_tokens": "e1b8de4f0f59c494"
    },
    "truncated": 4362,
    "non_truncated": 0,
    "padded": 0,
    "non_padded": 4362,
    "num_truncated_few_shots": 0
  }
} |
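
For readers who want to consume this file programmatically, below is a minimal Python sketch that loads a lighteval results file of this shape and prints each task's prefix-exact-match score with its standard error. The local filename `results.json` and the choice of the `pem` metric are assumptions for illustration; only the standard library is used.

```python
import json

# Minimal sketch: summarize a lighteval BBH results file shaped like the
# one above. The local path "results.json" is an assumption; point it at
# wherever you saved the file.
with open("results.json") as f:
    data = json.load(f)

# Each entry under "results" carries em/qem/pem/pqem/perfect_em scores
# plus matching *_stderr fields.
for task, scores in sorted(data["results"].items()):
    name = task.split("|")[1]  # e.g. "harness|bbh:navigate|3" -> "bbh:navigate"
    print(f"{name:<45} pem={scores['pem']:.3f} ± {scores['pem_stderr']:.3f}")
```

Note that the aggregate row `harness|bbh:_average|3` is stored alongside the per-task entries, so it appears in the listing too (first, under lexicographic sorting); filter on `"_average"` if you want per-task scores only.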