{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 388159.158304847,
"end_time": 388593.174023988,
"total_evaluation_time_secondes": "434.0157191409962",
"model_name": "google/gemma-7b-it",
"model_sha": "6a0105cc6b24d312544a4880affcbd5d973d7868",
"model_dtype": "torch.bfloat16",
"model_size": "15.91 GB",
"config": null
},
"results": {
"harness|bbh:causal_judgment|3": {
"em": 0.35294117647058826,
"em_stderr": 0.03504019983419236,
"qem": 0.35294117647058826,
"qem_stderr": 0.03504019983419236,
"pem": 0.5347593582887701,
"pem_stderr": 0.03657308098518922,
"pqem": 0.5347593582887701,
"pqem_stderr": 0.03657308098518922,
"perfect_em": 0.35294117647058826,
"perfect_em_stderr": 0.03504019983419236
},
"harness|bbh:date_understanding|3": {
"em": 0.008,
"em_stderr": 0.005645483676690173,
"qem": 0.008,
"qem_stderr": 0.005645483676690173,
"pem": 0.008,
"pem_stderr": 0.005645483676690173,
"pqem": 0.192,
"pqem_stderr": 0.024960691989172005,
"perfect_em": 0.008,
"perfect_em_stderr": 0.005645483676690173
},
"harness|bbh:disambiguation_qa|3": {
"em": 0.444,
"em_stderr": 0.03148684942554575,
"qem": 0.444,
"qem_stderr": 0.03148684942554575,
"pem": 0.468,
"pem_stderr": 0.031621252575725504,
"pqem": 0.704,
"pqem_stderr": 0.028928939388379635,
"perfect_em": 0.444,
"perfect_em_stderr": 0.03148684942554575
},
"harness|bbh:geometric_shapes|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.0,
"pqem_stderr": 0.0,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:logical_deduction_five_objects|3": {
"em": 0.112,
"em_stderr": 0.019985536939171433,
"qem": 0.112,
"qem_stderr": 0.019985536939171433,
"pem": 0.112,
"pem_stderr": 0.019985536939171433,
"pqem": 0.244,
"pqem_stderr": 0.02721799546455318,
"perfect_em": 0.112,
"perfect_em_stderr": 0.019985536939171433
},
"harness|bbh:logical_deduction_seven_objects|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.148,
"pqem_stderr": 0.02250354724380615,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:logical_deduction_three_objects|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.32,
"pqem_stderr": 0.029561724955241037,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:movie_recommendation|3": {
"em": 0.3493975903614458,
"em_stderr": 0.030275543348462015,
"qem": 0.3493975903614458,
"qem_stderr": 0.030275543348462015,
"pem": 0.42570281124497994,
"pem_stderr": 0.03139755022695944,
"pqem": 0.5261044176706827,
"pqem_stderr": 0.0317067307000546,
"perfect_em": 0.3493975903614458,
"perfect_em_stderr": 0.030275543348462015
},
"harness|bbh:navigate|3": {
"em": 0.424,
"em_stderr": 0.031318034374916155,
"qem": 0.424,
"qem_stderr": 0.031318034374916155,
"pem": 0.424,
"pem_stderr": 0.031318034374916155,
"pqem": 0.424,
"pqem_stderr": 0.031318034374916155,
"perfect_em": 0.424,
"perfect_em_stderr": 0.031318034374916155
},
"harness|bbh:reasoning_about_colored_objects|3": {
"em": 0.116,
"em_stderr": 0.02029342980308385,
"qem": 0.116,
"qem_stderr": 0.02029342980308385,
"pem": 0.2,
"pem_stderr": 0.025348970020979078,
"pqem": 0.34,
"pqem_stderr": 0.030020073605457907,
"perfect_em": 0.116,
"perfect_em_stderr": 0.02029342980308385
},
"harness|bbh:ruin_names|3": {
"em": 0.028225806451612902,
"em_stderr": 0.010537980335995019,
"qem": 0.028225806451612902,
"qem_stderr": 0.010537980335995019,
"pem": 0.028225806451612902,
"pem_stderr": 0.010537980335995019,
"pqem": 0.3064516129032258,
"pqem_stderr": 0.029333971259396716,
"perfect_em": 0.028225806451612902,
"perfect_em_stderr": 0.010537980335995019
},
"harness|bbh:salient_translation_error_detection|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.14,
"pqem_stderr": 0.021989409645240265,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:snarks|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.4606741573033708,
"pqem_stderr": 0.0374658773638787,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:sports_understanding|3": {
"em": 0.532,
"em_stderr": 0.031621252575725504,
"qem": 0.532,
"qem_stderr": 0.031621252575725504,
"pem": 0.532,
"pem_stderr": 0.031621252575725504,
"pqem": 0.532,
"pqem_stderr": 0.031621252575725504,
"perfect_em": 0.532,
"perfect_em_stderr": 0.031621252575725504
},
"harness|bbh:temporal_sequences|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.0,
"pem_stderr": 0.0,
"pqem": 0.284,
"pqem_stderr": 0.02857695873043742,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:tracking_shuffled_objects_five_objects|3": {
"em": 0.008,
"em_stderr": 0.005645483676690171,
"qem": 0.008,
"qem_stderr": 0.005645483676690171,
"pem": 0.024,
"pem_stderr": 0.009699087026964245,
"pqem": 0.212,
"pqem_stderr": 0.02590188469054116,
"perfect_em": 0.008,
"perfect_em_stderr": 0.005645483676690171
},
"harness|bbh:tracking_shuffled_objects_seven_objects|3": {
"em": 0.0,
"em_stderr": 0.0,
"qem": 0.0,
"qem_stderr": 0.0,
"pem": 0.008,
"pem_stderr": 0.005645483676690167,
"pqem": 0.264,
"pqem_stderr": 0.027934518957690908,
"perfect_em": 0.0,
"perfect_em_stderr": 0.0
},
"harness|bbh:tracking_shuffled_objects_three_objects|3": {
"em": 0.048,
"em_stderr": 0.01354688422808569,
"qem": 0.048,
"qem_stderr": 0.01354688422808569,
"pem": 0.104,
"pem_stderr": 0.019345100974843887,
"pqem": 0.316,
"pqem_stderr": 0.02946265759857868,
"perfect_em": 0.048,
"perfect_em_stderr": 0.01354688422808569
},
"harness|bbh:_average|3": {
"em": 0.1345869207379804,
"em_stderr": 0.01307759323436434,
"qem": 0.1345869207379804,
"qem_stderr": 0.01307759323436434,
"pem": 0.15937155422140906,
"pem_stderr": 0.014374378521658324,
"pqem": 0.3304438636758916,
"pqem_stderr": 0.0275042971960144,
"perfect_em": 0.1345869207379804,
"perfect_em_stderr": 0.01307759323436434
}
},
"versions": {
"harness|bbh:causal_judgment|3": 0,
"harness|bbh:date_understanding|3": 0,
"harness|bbh:disambiguation_qa|3": 0,
"harness|bbh:geometric_shapes|3": 0,
"harness|bbh:logical_deduction_five_objects|3": 0,
"harness|bbh:logical_deduction_seven_objects|3": 0,
"harness|bbh:logical_deduction_three_objects|3": 0,
"harness|bbh:movie_recommendation|3": 0,
"harness|bbh:navigate|3": 0,
"harness|bbh:reasoning_about_colored_objects|3": 0,
"harness|bbh:ruin_names|3": 0,
"harness|bbh:salient_translation_error_detection|3": 0,
"harness|bbh:snarks|3": 0,
"harness|bbh:sports_understanding|3": 0,
"harness|bbh:temporal_sequences|3": 0,
"harness|bbh:tracking_shuffled_objects_five_objects|3": 0,
"harness|bbh:tracking_shuffled_objects_seven_objects|3": 0,
"harness|bbh:tracking_shuffled_objects_three_objects|3": 0
},
"config_tasks": {
"harness|bbh:causal_judgment": {
"name": "bbh:causal_judgment",
"prompt_function": "bbh_causal_judgment",
"hf_repo": "lukaemon/bbh",
"hf_subset": "causal_judgement",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 187,
"effective_num_docs": 187,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:date_understanding": {
"name": "bbh:date_understanding",
"prompt_function": "bbh_date_understanding",
"hf_repo": "lukaemon/bbh",
"hf_subset": "date_understanding",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:disambiguation_qa": {
"name": "bbh:disambiguation_qa",
"prompt_function": "bbh_disambiguation_qa",
"hf_repo": "lukaemon/bbh",
"hf_subset": "disambiguation_qa",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:geometric_shapes": {
"name": "bbh:geometric_shapes",
"prompt_function": "bbh_geometric_shapes",
"hf_repo": "lukaemon/bbh",
"hf_subset": "geometric_shapes",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:logical_deduction_five_objects": {
"name": "bbh:logical_deduction_five_objects",
"prompt_function": "bbh_logical_deduction_five_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "logical_deduction_five_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:logical_deduction_seven_objects": {
"name": "bbh:logical_deduction_seven_objects",
"prompt_function": "bbh_logical_deduction_seven_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "logical_deduction_seven_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:logical_deduction_three_objects": {
"name": "bbh:logical_deduction_three_objects",
"prompt_function": "bbh_logical_deduction_three_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "logical_deduction_three_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:movie_recommendation": {
"name": "bbh:movie_recommendation",
"prompt_function": "bbh_movie_recommendation",
"hf_repo": "lukaemon/bbh",
"hf_subset": "movie_recommendation",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 249,
"effective_num_docs": 249,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:navigate": {
"name": "bbh:navigate",
"prompt_function": "bbh_navigate",
"hf_repo": "lukaemon/bbh",
"hf_subset": "navigate",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:reasoning_about_colored_objects": {
"name": "bbh:reasoning_about_colored_objects",
"prompt_function": "bbh_reasoning_about_colored_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "reasoning_about_colored_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:ruin_names": {
"name": "bbh:ruin_names",
"prompt_function": "bbh_ruin_names",
"hf_repo": "lukaemon/bbh",
"hf_subset": "ruin_names",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 248,
"effective_num_docs": 248,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:salient_translation_error_detection": {
"name": "bbh:salient_translation_error_detection",
"prompt_function": "bbh_salient_translation_error_detection",
"hf_repo": "lukaemon/bbh",
"hf_subset": "salient_translation_error_detection",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:snarks": {
"name": "bbh:snarks",
"prompt_function": "bbh_snarks",
"hf_repo": "lukaemon/bbh",
"hf_subset": "snarks",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 178,
"effective_num_docs": 178,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:sports_understanding": {
"name": "bbh:sports_understanding",
"prompt_function": "bbh_sports_understanding",
"hf_repo": "lukaemon/bbh",
"hf_subset": "sports_understanding",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:temporal_sequences": {
"name": "bbh:temporal_sequences",
"prompt_function": "bbh_temporal_sequences",
"hf_repo": "lukaemon/bbh",
"hf_subset": "temporal_sequences",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:tracking_shuffled_objects_five_objects": {
"name": "bbh:tracking_shuffled_objects_five_objects",
"prompt_function": "bbh_tracking_shuffled_objects_five_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "tracking_shuffled_objects_five_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:tracking_shuffled_objects_seven_objects": {
"name": "bbh:tracking_shuffled_objects_seven_objects",
"prompt_function": "bbh_tracking_shuffled_objects_seven_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "tracking_shuffled_objects_seven_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"harness|bbh:tracking_shuffled_objects_three_objects": {
"name": "bbh:tracking_shuffled_objects_three_objects",
"prompt_function": "bbh_tracking_shuffled_objects_three_objects",
"hf_repo": "lukaemon/bbh",
"hf_subset": "tracking_shuffled_objects_three_objects",
"metric": [
"exact_match",
"quasi_exact_match",
"prefix_exact_match",
"prefix_quasi_exact_match",
"perfect_exact_match"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": null,
"generation_size": 20,
"stop_sequence": [
"</s>",
"Q:",
"\n\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"harness"
],
"original_num_docs": 250,
"effective_num_docs": 250,
"trust_dataset": true,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"harness|bbh:causal_judgment|3": {
"hashes": {
"hash_examples": "63218f5ae055ab2b",
"hash_full_prompts": "da4486f76ca37d71",
"hash_input_tokens": "2f4439cd00aeae0e",
"hash_cont_tokens": "5b1799a3a014f1d0"
},
"truncated": 187,
"non_truncated": 0,
"padded": 0,
"non_padded": 187,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:date_understanding|3": {
"hashes": {
"hash_examples": "f145c7a06def3c8e",
"hash_full_prompts": "ab81948b0abe57b9",
"hash_input_tokens": "d36736b69b6676a1",
"hash_cont_tokens": "c5b75e47b5638f06"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:disambiguation_qa|3": {
"hashes": {
"hash_examples": "19677fd1773f7eb9",
"hash_full_prompts": "7a5061f70150c09e",
"hash_input_tokens": "1f6e1e8254e2c651",
"hash_cont_tokens": "8c18873ca6b0ecc2"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:geometric_shapes|3": {
"hashes": {
"hash_examples": "76c7b11a13cc72a9",
"hash_full_prompts": "e063deff2e84a2a6",
"hash_input_tokens": "f6cae0e55367ae6c",
"hash_cont_tokens": "746d14a5ad44a917"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:logical_deduction_five_objects|3": {
"hashes": {
"hash_examples": "0e958c856332a745",
"hash_full_prompts": "7544b780705e9654",
"hash_input_tokens": "069f2e5e4f4819e1",
"hash_cont_tokens": "729c1105fc97af0a"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:logical_deduction_seven_objects|3": {
"hashes": {
"hash_examples": "ab9de25a5eb40d09",
"hash_full_prompts": "9ba71ddb2add0b58",
"hash_input_tokens": "61154b5b2d23fd61",
"hash_cont_tokens": "b51c0b77ff9d6d57"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:logical_deduction_three_objects|3": {
"hashes": {
"hash_examples": "3c6bf52517714218",
"hash_full_prompts": "78aebd4f41995a84",
"hash_input_tokens": "8611de524f2cb180",
"hash_cont_tokens": "9f1a7db48f31a21f"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:movie_recommendation|3": {
"hashes": {
"hash_examples": "2d9dc4975935d31a",
"hash_full_prompts": "290a864a2acc3828",
"hash_input_tokens": "dd866e9aefb525a2",
"hash_cont_tokens": "6518272efad278e0"
},
"truncated": 249,
"non_truncated": 0,
"padded": 0,
"non_padded": 249,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:navigate|3": {
"hashes": {
"hash_examples": "ba91dcdb9a064255",
"hash_full_prompts": "4d8fad912ba90363",
"hash_input_tokens": "aea22a5b526e9934",
"hash_cont_tokens": "be8e7bc6c5b47c90"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:reasoning_about_colored_objects|3": {
"hashes": {
"hash_examples": "a6ba328c4c3385d2",
"hash_full_prompts": "96ca31772c57eeeb",
"hash_input_tokens": "f31e7a9ef88c8872",
"hash_cont_tokens": "5819c5197b25c2e5"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:ruin_names|3": {
"hashes": {
"hash_examples": "2ef28d5f2d4fdd25",
"hash_full_prompts": "f3ad065935c46de7",
"hash_input_tokens": "5cc6ead631865b9a",
"hash_cont_tokens": "f8ab1046d20bf69f"
},
"truncated": 248,
"non_truncated": 0,
"padded": 0,
"non_padded": 248,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:salient_translation_error_detection|3": {
"hashes": {
"hash_examples": "c13f25ec8ffed496",
"hash_full_prompts": "80fc33e0cd811a8d",
"hash_input_tokens": "6004f29b100793e5",
"hash_cont_tokens": "1d657567eb26eadb"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:snarks|3": {
"hashes": {
"hash_examples": "5f6db7bff7f6f22e",
"hash_full_prompts": "55586142735dea08",
"hash_input_tokens": "f78df86ae6e4e291",
"hash_cont_tokens": "9bc1f9cced90bcbf"
},
"truncated": 178,
"non_truncated": 0,
"padded": 0,
"non_padded": 178,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:sports_understanding|3": {
"hashes": {
"hash_examples": "042afbe5d9c1f02d",
"hash_full_prompts": "1c523efd25e2ebfd",
"hash_input_tokens": "176635253aa6d0e9",
"hash_cont_tokens": "6a86177c6456f503"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:temporal_sequences|3": {
"hashes": {
"hash_examples": "803a05f352eb6afc",
"hash_full_prompts": "caa56ea50f0068e1",
"hash_input_tokens": "74b504d9b40add38",
"hash_cont_tokens": "678f18cf00e47c3a"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:tracking_shuffled_objects_five_objects|3": {
"hashes": {
"hash_examples": "2bbac6db7ab0d527",
"hash_full_prompts": "4c9e2f3952f99f7d",
"hash_input_tokens": "36d0e7377cce953e",
"hash_cont_tokens": "179f7b1e3415d569"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:tracking_shuffled_objects_seven_objects|3": {
"hashes": {
"hash_examples": "845caf093ac2b58c",
"hash_full_prompts": "d4f4f0656462d45f",
"hash_input_tokens": "d255d2f245886c58",
"hash_cont_tokens": "8e38e96fea2ebbbb"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
},
"harness|bbh:tracking_shuffled_objects_three_objects|3": {
"hashes": {
"hash_examples": "9004f14d5a32b9a8",
"hash_full_prompts": "be9c55b344ca07ab",
"hash_input_tokens": "55c63804a84860f6",
"hash_cont_tokens": "59a2bddec05be2ad"
},
"truncated": 250,
"non_truncated": 0,
"padded": 0,
"non_padded": 250,
"effective_few_shots": 3.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "4ff1e3dc5703575d",
"hash_full_prompts": "ec35636f85fd5ee2",
"hash_input_tokens": "016924e640ab4527",
"hash_cont_tokens": "d26a94a5f76b2c1d"
},
"truncated": 4362,
"non_truncated": 0,
"padded": 0,
"non_padded": 4362,
"num_truncated_few_shots": 0
}
}