open-r1-eval-leaderboard/eval_results/HuggingFaceH4/mistral-7b-ift/v25.2/bbh/results_2024-03-25T15-55-57.007744.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1467500.567268427,
    "end_time": 1467697.843720951,
    "total_evaluation_time_secondes": "197.27645252388902",
    "model_name": "HuggingFaceH4/mistral-7b-ift",
    "model_sha": "c611133be4aeaac5cf03e723fdc682a46dbebb51",
    "model_dtype": "torch.bfloat16",
    "model_size": "13.99 GB",
    "config": null
  },
  "results": {
    "harness|bbh:causal_judgment|3": {
      "em": 0.5561497326203209,
      "em_stderr": 0.03642987131924728,
      "qem": 0.5561497326203209,
      "qem_stderr": 0.03642987131924728,
      "pem": 0.5561497326203209,
      "pem_stderr": 0.03642987131924728,
      "pqem": 0.5561497326203209,
      "pqem_stderr": 0.03642987131924728,
      "perfect_em": 0.5561497326203209,
      "perfect_em_stderr": 0.03642987131924728
    },
    "harness|bbh:date_understanding|3": {
      "em": 0.48,
      "em_stderr": 0.031660853408495185,
      "qem": 0.48,
      "qem_stderr": 0.031660853408495185,
      "pem": 0.48,
      "pem_stderr": 0.031660853408495185,
      "pqem": 0.572,
      "pqem_stderr": 0.031355968923772605,
      "perfect_em": 0.48,
      "perfect_em_stderr": 0.031660853408495185
    },
    "harness|bbh:disambiguation_qa|3": {
      "em": 0.596,
      "em_stderr": 0.031096688184825295,
      "qem": 0.596,
      "qem_stderr": 0.031096688184825295,
      "pem": 0.596,
      "pem_stderr": 0.031096688184825295,
      "pqem": 0.728,
      "pqem_stderr": 0.02820008829630998,
      "perfect_em": 0.596,
      "perfect_em_stderr": 0.031096688184825295
    },
    "harness|bbh:geometric_shapes|3": {
      "em": 0.2,
      "em_stderr": 0.025348970020979078,
      "qem": 0.2,
      "qem_stderr": 0.025348970020979078,
      "pem": 0.2,
      "pem_stderr": 0.025348970020979078,
      "pqem": 0.2,
      "pqem_stderr": 0.025348970020979078,
      "perfect_em": 0.2,
      "perfect_em_stderr": 0.025348970020979078
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "em": 0.352,
      "em_stderr": 0.030266288057359942,
      "qem": 0.352,
      "qem_stderr": 0.030266288057359942,
      "pem": 0.352,
      "pem_stderr": 0.030266288057359942,
      "pqem": 0.484,
      "pqem_stderr": 0.03166998503010742,
      "perfect_em": 0.352,
      "perfect_em_stderr": 0.030266288057359942
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "em": 0.284,
      "em_stderr": 0.028576958730437408,
      "qem": 0.284,
      "qem_stderr": 0.028576958730437408,
      "pem": 0.284,
      "pem_stderr": 0.028576958730437408,
      "pqem": 0.4,
      "pqem_stderr": 0.031046021028253237,
      "perfect_em": 0.284,
      "perfect_em_stderr": 0.028576958730437408
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "em": 0.5,
      "em_stderr": 0.031686212526223896,
      "qem": 0.5,
      "qem_stderr": 0.031686212526223896,
      "pem": 0.5,
      "pem_stderr": 0.031686212526223896,
      "pqem": 0.756,
      "pqem_stderr": 0.027217995464553182,
      "perfect_em": 0.5,
      "perfect_em_stderr": 0.031686212526223896
    },
    "harness|bbh:movie_recommendation|3": {
      "em": 0.5582329317269076,
      "em_stderr": 0.031533963107802196,
      "qem": 0.5582329317269076,
      "qem_stderr": 0.031533963107802196,
      "pem": 0.5582329317269076,
      "pem_stderr": 0.031533963107802196,
      "pqem": 0.6626506024096386,
      "pqem_stderr": 0.030023161860939146,
      "perfect_em": 0.5582329317269076,
      "perfect_em_stderr": 0.031533963107802196
    },
    "harness|bbh:navigate|3": {
      "em": 0.604,
      "em_stderr": 0.030993197854577846,
      "qem": 0.604,
      "qem_stderr": 0.030993197854577846,
      "pem": 0.604,
      "pem_stderr": 0.030993197854577846,
      "pqem": 0.604,
      "pqem_stderr": 0.030993197854577846,
      "perfect_em": 0.604,
      "perfect_em_stderr": 0.030993197854577846
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "em": 0.348,
      "em_stderr": 0.0301865684645117,
      "qem": 0.348,
      "qem_stderr": 0.0301865684645117,
      "pem": 0.356,
      "pem_stderr": 0.030343680657153215,
      "pqem": 0.512,
      "pqem_stderr": 0.03167708558254708,
      "perfect_em": 0.348,
      "perfect_em_stderr": 0.0301865684645117
    },
    "harness|bbh:ruin_names|3": {
      "em": 0.3185483870967742,
      "em_stderr": 0.029645360812643396,
      "qem": 0.3185483870967742,
      "qem_stderr": 0.029645360812643396,
      "pem": 0.3185483870967742,
      "pem_stderr": 0.029645360812643396,
      "pqem": 0.45564516129032256,
      "pqem_stderr": 0.03168881131061097,
      "perfect_em": 0.3185483870967742,
      "perfect_em_stderr": 0.029645360812643396
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "em": 0.352,
      "em_stderr": 0.030266288057359942,
      "qem": 0.352,
      "qem_stderr": 0.030266288057359942,
      "pem": 0.352,
      "pem_stderr": 0.030266288057359942,
      "pqem": 0.492,
      "pqem_stderr": 0.031682156431413803,
      "perfect_em": 0.352,
      "perfect_em_stderr": 0.030266288057359942
    },
    "harness|bbh:snarks|3": {
      "em": 0.5224719101123596,
      "em_stderr": 0.037544325084871946,
      "qem": 0.5224719101123596,
      "qem_stderr": 0.037544325084871946,
      "pem": 0.5224719101123596,
      "pem_stderr": 0.037544325084871946,
      "pqem": 0.6966292134831461,
      "pqem_stderr": 0.03455421944400099,
      "perfect_em": 0.5224719101123596,
      "perfect_em_stderr": 0.037544325084871946
    },
    "harness|bbh:sports_understanding|3": {
      "em": 0.78,
      "em_stderr": 0.026251792824605824,
      "qem": 0.78,
      "qem_stderr": 0.026251792824605824,
      "pem": 0.78,
      "pem_stderr": 0.026251792824605824,
      "pqem": 0.78,
      "pqem_stderr": 0.026251792824605824,
      "perfect_em": 0.78,
      "perfect_em_stderr": 0.026251792824605824
    },
    "harness|bbh:temporal_sequences|3": {
      "em": 0.156,
      "em_stderr": 0.022995023034068744,
      "qem": 0.156,
      "qem_stderr": 0.022995023034068744,
      "pem": 0.156,
      "pem_stderr": 0.022995023034068744,
      "pqem": 0.436,
      "pqem_stderr": 0.03142556706028128,
      "perfect_em": 0.156,
      "perfect_em_stderr": 0.022995023034068744
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "em": 0.192,
      "em_stderr": 0.024960691989172015,
      "qem": 0.192,
      "qem_stderr": 0.024960691989172015,
      "pem": 0.192,
      "pem_stderr": 0.024960691989172015,
      "pqem": 0.392,
      "pqem_stderr": 0.030938207620401195,
      "perfect_em": 0.192,
      "perfect_em_stderr": 0.024960691989172015
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "em": 0.12,
      "em_stderr": 0.02059360059683997,
      "qem": 0.12,
      "qem_stderr": 0.02059360059683997,
      "pem": 0.12,
      "pem_stderr": 0.02059360059683997,
      "pqem": 0.264,
      "pqem_stderr": 0.02793451895769091,
      "perfect_em": 0.12,
      "perfect_em_stderr": 0.02059360059683997
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "em": 0.348,
      "em_stderr": 0.030186568464511686,
      "qem": 0.348,
      "qem_stderr": 0.030186568464511686,
      "pem": 0.348,
      "pem_stderr": 0.030186568464511686,
      "pqem": 0.664,
      "pqem_stderr": 0.029933259094191516,
      "perfect_em": 0.348,
      "perfect_em_stderr": 0.030186568464511686
    },
    "harness|bbh:_average|3": {
      "em": 0.40374460897535347,
      "em_stderr": 0.029456845696585188,
      "qem": 0.40374460897535347,
      "qem_stderr": 0.029456845696585188,
      "pem": 0.40418905341979783,
      "pem_stderr": 0.029465574151731937,
      "pqem": 0.5363930394335238,
      "pqem_stderr": 0.03046504878469352,
      "perfect_em": 0.40374460897535347,
      "perfect_em_stderr": 0.029456845696585188
    }
  },
  "versions": {
    "harness|bbh:causal_judgment|3": 0,
    "harness|bbh:date_understanding|3": 0,
    "harness|bbh:disambiguation_qa|3": 0,
    "harness|bbh:geometric_shapes|3": 0,
    "harness|bbh:logical_deduction_five_objects|3": 0,
    "harness|bbh:logical_deduction_seven_objects|3": 0,
    "harness|bbh:logical_deduction_three_objects|3": 0,
    "harness|bbh:movie_recommendation|3": 0,
    "harness|bbh:navigate|3": 0,
    "harness|bbh:reasoning_about_colored_objects|3": 0,
    "harness|bbh:ruin_names|3": 0,
    "harness|bbh:salient_translation_error_detection|3": 0,
    "harness|bbh:snarks|3": 0,
    "harness|bbh:sports_understanding|3": 0,
    "harness|bbh:temporal_sequences|3": 0,
    "harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
    "harness|bbh:tracking_shuffled_objects_three_objects|3": 0
  },
  "config_tasks": {
    "harness|bbh:causal_judgment": {
      "name": "bbh:causal_judgment",
      "prompt_function": "bbh_causal_judgment",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "causal_judgement",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 187,
      "effective_num_docs": 187,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:date_understanding": {
      "name": "bbh:date_understanding",
      "prompt_function": "bbh_date_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "date_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:disambiguation_qa": {
      "name": "bbh:disambiguation_qa",
      "prompt_function": "bbh_disambiguation_qa",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "disambiguation_qa",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:geometric_shapes": {
      "name": "bbh:geometric_shapes",
      "prompt_function": "bbh_geometric_shapes",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "geometric_shapes",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_five_objects": {
      "name": "bbh:logical_deduction_five_objects",
      "prompt_function": "bbh_logical_deduction_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_seven_objects": {
      "name": "bbh:logical_deduction_seven_objects",
      "prompt_function": "bbh_logical_deduction_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:logical_deduction_three_objects": {
      "name": "bbh:logical_deduction_three_objects",
      "prompt_function": "bbh_logical_deduction_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "logical_deduction_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:movie_recommendation": {
      "name": "bbh:movie_recommendation",
      "prompt_function": "bbh_movie_recommendation",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "movie_recommendation",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 249,
      "effective_num_docs": 249,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:navigate": {
      "name": "bbh:navigate",
      "prompt_function": "bbh_navigate",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "navigate",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:reasoning_about_colored_objects": {
      "name": "bbh:reasoning_about_colored_objects",
      "prompt_function": "bbh_reasoning_about_colored_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "reasoning_about_colored_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:ruin_names": {
      "name": "bbh:ruin_names",
      "prompt_function": "bbh_ruin_names",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "ruin_names",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 248,
      "effective_num_docs": 248,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:salient_translation_error_detection": {
      "name": "bbh:salient_translation_error_detection",
      "prompt_function": "bbh_salient_translation_error_detection",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "salient_translation_error_detection",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:snarks": {
      "name": "bbh:snarks",
      "prompt_function": "bbh_snarks",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "snarks",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 178,
      "effective_num_docs": 178,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:sports_understanding": {
      "name": "bbh:sports_understanding",
      "prompt_function": "bbh_sports_understanding",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "sports_understanding",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:temporal_sequences": {
      "name": "bbh:temporal_sequences",
      "prompt_function": "bbh_temporal_sequences",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "temporal_sequences",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_five_objects": {
      "name": "bbh:tracking_shuffled_objects_five_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_five_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_five_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects": {
      "name": "bbh:tracking_shuffled_objects_seven_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_seven_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "harness|bbh:tracking_shuffled_objects_three_objects": {
      "name": "bbh:tracking_shuffled_objects_three_objects",
      "prompt_function": "bbh_tracking_shuffled_objects_three_objects",
      "hf_repo": "lukaemon/bbh",
      "hf_subset": "tracking_shuffled_objects_three_objects",
      "metric": [
        "exact_match",
        "quasi_exact_match",
        "prefix_exact_match",
        "prefix_quasi_exact_match",
        "perfect_exact_match"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": 20,
      "stop_sequence": [
        "</s>",
        "Q:",
        "\n\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "harness"
      ],
      "original_num_docs": 250,
      "effective_num_docs": 250,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "harness|bbh:causal_judgment|3": {
      "hashes": {
        "hash_examples": "63218f5ae055ab2b",
        "hash_full_prompts": "fa8168f39a475fb0",
        "hash_input_tokens": "787f75e06fd43c0d",
        "hash_cont_tokens": "e4c253a8681bf364"
      },
      "truncated": 187,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 187,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:date_understanding|3": {
      "hashes": {
        "hash_examples": "f145c7a06def3c8e",
        "hash_full_prompts": "2cceeea606638d49",
        "hash_input_tokens": "10c13d6fb8af7c22",
        "hash_cont_tokens": "0ed430d5278b416c"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:disambiguation_qa|3": {
      "hashes": {
        "hash_examples": "19677fd1773f7eb9",
        "hash_full_prompts": "d8f1ba70c22ae578",
        "hash_input_tokens": "c21a88707f480cab",
        "hash_cont_tokens": "7b7794fcb91031e2"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:geometric_shapes|3": {
      "hashes": {
        "hash_examples": "76c7b11a13cc72a9",
        "hash_full_prompts": "52a60ed1d0113b8b",
        "hash_input_tokens": "10e113b2cf3fa584",
        "hash_cont_tokens": "01cecc5dff38dd91"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_five_objects|3": {
      "hashes": {
        "hash_examples": "0e958c856332a745",
        "hash_full_prompts": "253aa9791c941909",
        "hash_input_tokens": "0bc166cab0aed76a",
        "hash_cont_tokens": "9d26dd290bafa8fe"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_seven_objects|3": {
      "hashes": {
        "hash_examples": "ab9de25a5eb40d09",
        "hash_full_prompts": "aa6117f601cd268e",
        "hash_input_tokens": "ab99c78b48e3a0bb",
        "hash_cont_tokens": "9048cc3f73ab92fb"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:logical_deduction_three_objects|3": {
      "hashes": {
        "hash_examples": "3c6bf52517714218",
        "hash_full_prompts": "1892b050bc7848a4",
        "hash_input_tokens": "a720b56aa7c52551",
        "hash_cont_tokens": "daac20c5618ed703"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:movie_recommendation|3": {
      "hashes": {
        "hash_examples": "2d9dc4975935d31a",
        "hash_full_prompts": "8e00606ed3407167",
        "hash_input_tokens": "c825ab1c99245a17",
        "hash_cont_tokens": "00faebfddd3ad134"
      },
      "truncated": 249,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 249,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:navigate|3": {
      "hashes": {
        "hash_examples": "ba91dcdb9a064255",
        "hash_full_prompts": "8d50c5baf1df7aef",
        "hash_input_tokens": "f234e6b28ea1fa49",
        "hash_cont_tokens": "62c3fdb3d1eb28de"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:reasoning_about_colored_objects|3": {
      "hashes": {
        "hash_examples": "a6ba328c4c3385d2",
        "hash_full_prompts": "3d2441a21c12a960",
        "hash_input_tokens": "f3b577892955aa84",
        "hash_cont_tokens": "dcb665080b66ff23"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:ruin_names|3": {
      "hashes": {
        "hash_examples": "2ef28d5f2d4fdd25",
        "hash_full_prompts": "ba95caa786f313b1",
        "hash_input_tokens": "9954b30d4205604a",
        "hash_cont_tokens": "a88425b4cd016660"
      },
      "truncated": 248,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 248,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:salient_translation_error_detection|3": {
      "hashes": {
        "hash_examples": "c13f25ec8ffed496",
        "hash_full_prompts": "a8512d174e1cab8f",
        "hash_input_tokens": "3e738df24b7eddf8",
        "hash_cont_tokens": "1ff03589170a9c22"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:snarks|3": {
      "hashes": {
        "hash_examples": "5f6db7bff7f6f22e",
        "hash_full_prompts": "ff91d81466b9041f",
        "hash_input_tokens": "21388b09e13d0208",
        "hash_cont_tokens": "a0e9d30060362261"
      },
      "truncated": 178,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 178,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:sports_understanding|3": {
      "hashes": {
        "hash_examples": "042afbe5d9c1f02d",
        "hash_full_prompts": "a59324d9eb37e0f5",
        "hash_input_tokens": "0ad41bb8d2290a5b",
        "hash_cont_tokens": "2dd1a0bbe88c59fc"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:temporal_sequences|3": {
      "hashes": {
        "hash_examples": "803a05f352eb6afc",
        "hash_full_prompts": "1b3971192bf481e7",
        "hash_input_tokens": "3051b60940ccceab",
        "hash_cont_tokens": "ad6342524c809638"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_five_objects|3": {
      "hashes": {
        "hash_examples": "2bbac6db7ab0d527",
        "hash_full_prompts": "7ef4567d2fcf5094",
        "hash_input_tokens": "b841310ee5531238",
        "hash_cont_tokens": "8bfa64af5bd5d43e"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_seven_objects|3": {
      "hashes": {
        "hash_examples": "845caf093ac2b58c",
        "hash_full_prompts": "196a0f8712857624",
        "hash_input_tokens": "3e738df24b7eddf8",
        "hash_cont_tokens": "62b9c4a2a02ea0a6"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    },
    "harness|bbh:tracking_shuffled_objects_three_objects|3": {
      "hashes": {
        "hash_examples": "9004f14d5a32b9a8",
        "hash_full_prompts": "592a03f0518f17b6",
        "hash_input_tokens": "19e0ef1dd5ae9d33",
        "hash_cont_tokens": "8bf730633382d68c"
      },
      "truncated": 250,
      "non_truncated": 0,
      "padded": 0,
      "non_padded": 250,
      "effective_few_shots": 3.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "4ff1e3dc5703575d",
      "hash_full_prompts": "0d80ce968d89d4ef",
      "hash_input_tokens": "72bda1e7aeb34786",
      "hash_cont_tokens": "2d6caa036b5ecdf8"
    },
    "truncated": 4362,
    "non_truncated": 0,
    "padded": 0,
    "non_padded": 4362,
    "num_truncated_few_shots": 0
  }
}
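
A minimal sketch of reading this results file and extracting the headline BBH scores. The key layout ("results" -> "harness|bbh:<task>|3" -> metric fields) and the filename are taken from the JSON above; the local path is an assumption, i.e. the file is presumed downloaded from the eval_results dataset.

import json

# Hypothetical local copy of the file shown above.
path = "results_2024-03-25T15-55-57.007744.json"
with open(path) as f:
    data = json.load(f)

# "harness|bbh:_average|3" aggregates the 18 BBH subtasks;
# "em" is exact match and "em_stderr" its standard error.
avg = data["results"]["harness|bbh:_average|3"]
print(f"BBH average EM: {avg['em']:.4f} +/- {avg['em_stderr']:.4f}")

# Per-task exact-match scores, skipping the aggregate entry.
for task, metrics in sorted(data["results"].items()):
    if not task.endswith(":_average|3"):
        print(f"{task}: {metrics['em']:.3f}")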