open-r1-eval-leaderboard/eval_results/alpindale/WizardLM-2-8x22B/main/bbh/results_2024-04-16T07-13-03.245435.json
{
    "config_general": {
        "lighteval_sha": "?",
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null,
        "job_id": "",
        "start_time": 573205.331334793,
        "end_time": 575098.948965231,
        "total_evaluation_time_secondes": "1893.6176304379478",
        "model_name": "alpindale/WizardLM-2-8x22B",
        "model_sha": "dd9964910e87b3876a2959911a29cbdb1d518180",
        "model_dtype": "torch.bfloat16",
        "model_size": "263.68 GB",
        "config": null
    },
"results": { | |
"lighteval|bigbench:causal_judgment|0": { | |
"acc": 0.631578947368421, | |
"acc_stderr": 0.03508771929824559 | |
}, | |
"lighteval|bigbench:date_understanding|0": { | |
"acc": 0.7750677506775068, | |
"acc_stderr": 0.02176565731643199 | |
}, | |
"lighteval|bigbench:disambiguation_qa|0": { | |
"acc": 0.5116279069767442, | |
"acc_stderr": 0.031180707879849067 | |
}, | |
"lighteval|bigbench:geometric_shapes|0": { | |
"acc": 0.25277777777777777, | |
"acc_stderr": 0.022937554374151313 | |
}, | |
"lighteval|bigbench:logical_deduction_five_objects|0": { | |
"acc": 0.292, | |
"acc_stderr": 0.020354375480530085 | |
}, | |
"lighteval|bigbench:logical_deduction_seven_objects|0": { | |
"acc": 0.17, | |
"acc_stderr": 0.01420773731858363 | |
}, | |
"lighteval|bigbench:logical_deduction_three_objects|0": { | |
"acc": 0.38, | |
"acc_stderr": 0.02807062283278968 | |
}, | |
"lighteval|bigbench:movie_recommendation|0": { | |
"acc": 0.346, | |
"acc_stderr": 0.021294951277234637 | |
}, | |
"lighteval|bigbench:navigate|0": { | |
"acc": 0.563, | |
"acc_stderr": 0.015693223928730373 | |
}, | |
"lighteval|bigbench:reasoning_about_colored_objects|0": { | |
"acc": 0.554, | |
"acc_stderr": 0.011117724672834362 | |
}, | |
"lighteval|bigbench:ruin_names|0": { | |
"acc": 0.65625, | |
"acc_stderr": 0.02246478414865448 | |
}, | |
"lighteval|bigbench:salient_translation_error_detection|0": { | |
"acc": 0.18837675350701402, | |
"acc_stderr": 0.012383497295229416 | |
}, | |
"lighteval|bigbench:snarks|0": { | |
"acc": 0.7790055248618785, | |
"acc_stderr": 0.03092606928866788 | |
}, | |
"lighteval|bigbench:sports_understanding|0": { | |
"acc": 0.719, | |
"acc_stderr": 0.014221154708434955 | |
}, | |
"lighteval|bigbench:temporal_sequences|0": { | |
"acc": 0.74, | |
"acc_stderr": 0.013877773329774168 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_five_objects|0": { | |
"acc": 0.204, | |
"acc_stderr": 0.01140224578612368 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_seven_objects|0": { | |
"acc": 0.1417142857142857, | |
"acc_stderr": 0.008339266507816004 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_three_objects|0": { | |
"acc": 0.38, | |
"acc_stderr": 0.02807062283278968 | |
}, | |
"lighteval|bigbench:_average|0": { | |
"acc": 0.4602443859379793, | |
"acc_stderr": 0.020188649348715056 | |
}, | |
"all": { | |
"acc": 0.4602443859379793, | |
"acc_stderr": 0.020188649348715056 | |
} | |
}, | |
"versions": { | |
"lighteval|bigbench:causal_judgment|0": 0, | |
"lighteval|bigbench:date_understanding|0": 0, | |
"lighteval|bigbench:disambiguation_qa|0": 0, | |
"lighteval|bigbench:geometric_shapes|0": 0, | |
"lighteval|bigbench:logical_deduction_five_objects|0": 0, | |
"lighteval|bigbench:logical_deduction_seven_objects|0": 0, | |
"lighteval|bigbench:logical_deduction_three_objects|0": 0, | |
"lighteval|bigbench:movie_recommendation|0": 0, | |
"lighteval|bigbench:navigate|0": 0, | |
"lighteval|bigbench:reasoning_about_colored_objects|0": 0, | |
"lighteval|bigbench:ruin_names|0": 0, | |
"lighteval|bigbench:salient_translation_error_detection|0": 0, | |
"lighteval|bigbench:snarks|0": 0, | |
"lighteval|bigbench:sports_understanding|0": 0, | |
"lighteval|bigbench:temporal_sequences|0": 0, | |
"lighteval|bigbench:tracking_shuffled_objects_five_objects|0": 0, | |
"lighteval|bigbench:tracking_shuffled_objects_seven_objects|0": 0, | |
"lighteval|bigbench:tracking_shuffled_objects_three_objects|0": 0 | |
}, | |
"config_tasks": { | |
"lighteval|bigbench:causal_judgment": { | |
"name": "bigbench:causal_judgment", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "causal_judgement", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 190, | |
"effective_num_docs": 190, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:date_understanding": { | |
"name": "bigbench:date_understanding", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "date_understanding", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 369, | |
"effective_num_docs": 369, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:disambiguation_qa": { | |
"name": "bigbench:disambiguation_qa", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "disambiguation_qa", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 258, | |
"effective_num_docs": 258, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:geometric_shapes": { | |
"name": "bigbench:geometric_shapes", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "geometric_shapes", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 360, | |
"effective_num_docs": 360, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:logical_deduction_five_objects": { | |
"name": "bigbench:logical_deduction_five_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "logical_deduction_five_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 500, | |
"effective_num_docs": 500, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:logical_deduction_seven_objects": { | |
"name": "bigbench:logical_deduction_seven_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "logical_deduction_seven_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 700, | |
"effective_num_docs": 700, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:logical_deduction_three_objects": { | |
"name": "bigbench:logical_deduction_three_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "logical_deduction_three_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 300, | |
"effective_num_docs": 300, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:movie_recommendation": { | |
"name": "bigbench:movie_recommendation", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "movie_recommendation", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 500, | |
"effective_num_docs": 500, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:navigate": { | |
"name": "bigbench:navigate", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "navigate", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1000, | |
"effective_num_docs": 1000, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:reasoning_about_colored_objects": { | |
"name": "bigbench:reasoning_about_colored_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "reasoning_about_colored_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 2000, | |
"effective_num_docs": 2000, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:ruin_names": { | |
"name": "bigbench:ruin_names", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "ruin_names", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 448, | |
"effective_num_docs": 448, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:salient_translation_error_detection": { | |
"name": "bigbench:salient_translation_error_detection", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "salient_translation_error_detection", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 998, | |
"effective_num_docs": 998, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:snarks": { | |
"name": "bigbench:snarks", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "snarks", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 181, | |
"effective_num_docs": 181, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:sports_understanding": { | |
"name": "bigbench:sports_understanding", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "sports_understanding", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1000, | |
"effective_num_docs": 1000, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:temporal_sequences": { | |
"name": "bigbench:temporal_sequences", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "temporal_sequences", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1000, | |
"effective_num_docs": 1000, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_five_objects": { | |
"name": "bigbench:tracking_shuffled_objects_five_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "tracking_shuffled_objects_five_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1250, | |
"effective_num_docs": 1250, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_seven_objects": { | |
"name": "bigbench:tracking_shuffled_objects_seven_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "tracking_shuffled_objects_seven_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 1750, | |
"effective_num_docs": 1750, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_three_objects": { | |
"name": "bigbench:tracking_shuffled_objects_three_objects", | |
"prompt_function": "bbh_lighteval", | |
"hf_repo": "lighteval/bbh", | |
"hf_subset": "tracking_shuffled_objects_three_objects", | |
"metric": [ | |
"loglikelihood_acc_single_token" | |
], | |
"hf_avail_splits": [ | |
"train" | |
], | |
"evaluation_splits": [ | |
"train" | |
], | |
"few_shots_split": null, | |
"few_shots_select": null, | |
"generation_size": -1, | |
"stop_sequence": [ | |
"</s>", | |
"Q:", | |
"\n\n" | |
], | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 300, | |
"effective_num_docs": 300, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
} | |
}, | |
"summary_tasks": { | |
"lighteval|bigbench:causal_judgment|0": { | |
"hashes": { | |
"hash_examples": "dfb1ae47218f2850", | |
"hash_full_prompts": "4c7f2f76409f70f4", | |
"hash_input_tokens": "8247542cfbb85463", | |
"hash_cont_tokens": "09afe7f90e2e5f87" | |
}, | |
"truncated": 0, | |
"non_truncated": 190, | |
"padded": 189, | |
"non_padded": 1, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:date_understanding|0": { | |
"hashes": { | |
"hash_examples": "2b823c41500a6ec2", | |
"hash_full_prompts": "e83dd502c036bded", | |
"hash_input_tokens": "28f8d05817153ac7", | |
"hash_cont_tokens": "793a4f414457989c" | |
}, | |
"truncated": 0, | |
"non_truncated": 369, | |
"padded": 369, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:disambiguation_qa|0": { | |
"hashes": { | |
"hash_examples": "2a4c3d41db198cea", | |
"hash_full_prompts": "9cf3c0f75fc49921", | |
"hash_input_tokens": "c3c7f3dc9c52a93d", | |
"hash_cont_tokens": "f88079a6d2897e9d" | |
}, | |
"truncated": 0, | |
"non_truncated": 258, | |
"padded": 258, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:geometric_shapes|0": { | |
"hashes": { | |
"hash_examples": "24aa261103911b72", | |
"hash_full_prompts": "862e0cf9b72a95a4", | |
"hash_input_tokens": "e39218908b75f075", | |
"hash_cont_tokens": "cba7422d323578aa" | |
}, | |
"truncated": 0, | |
"non_truncated": 360, | |
"padded": 360, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:logical_deduction_five_objects|0": { | |
"hashes": { | |
"hash_examples": "cb5bdc92afc41f83", | |
"hash_full_prompts": "238b886ee1e7b36b", | |
"hash_input_tokens": "6c702fb1803ac3b0", | |
"hash_cont_tokens": "39da984a4fe8338e" | |
}, | |
"truncated": 0, | |
"non_truncated": 500, | |
"padded": 500, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:logical_deduction_seven_objects|0": { | |
"hashes": { | |
"hash_examples": "b6805ea696739f9f", | |
"hash_full_prompts": "ec2aefb5b9986a54", | |
"hash_input_tokens": "80af2a8d7c0592a6", | |
"hash_cont_tokens": "b00feb2633bf2515" | |
}, | |
"truncated": 0, | |
"non_truncated": 700, | |
"padded": 700, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:logical_deduction_three_objects|0": { | |
"hashes": { | |
"hash_examples": "0509e5712ab9bcdb", | |
"hash_full_prompts": "f369763a9ab136d8", | |
"hash_input_tokens": "d2a9b05ce5d0a360", | |
"hash_cont_tokens": "271171373fbf2b7c" | |
}, | |
"truncated": 0, | |
"non_truncated": 300, | |
"padded": 300, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:movie_recommendation|0": { | |
"hashes": { | |
"hash_examples": "530cc6f737830f45", | |
"hash_full_prompts": "7aee02c6af822316", | |
"hash_input_tokens": "314a72d314ada8c7", | |
"hash_cont_tokens": "be520838bf2427bc" | |
}, | |
"truncated": 0, | |
"non_truncated": 500, | |
"padded": 500, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:navigate|0": { | |
"hashes": { | |
"hash_examples": "7962ef85d0058b9a", | |
"hash_full_prompts": "b6eff183ee6f8e88", | |
"hash_input_tokens": "8348a0d226e05705", | |
"hash_cont_tokens": "22ae558c49b5ff4d" | |
}, | |
"truncated": 0, | |
"non_truncated": 1000, | |
"padded": 1000, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:reasoning_about_colored_objects|0": { | |
"hashes": { | |
"hash_examples": "39be1ab1677a651d", | |
"hash_full_prompts": "897f7f976f40ec00", | |
"hash_input_tokens": "190128890cb65d93", | |
"hash_cont_tokens": "e7452d55b736faf5" | |
}, | |
"truncated": 0, | |
"non_truncated": 2000, | |
"padded": 2000, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:ruin_names|0": { | |
"hashes": { | |
"hash_examples": "e9b96b31d2154941", | |
"hash_full_prompts": "cad13f7801d6f123", | |
"hash_input_tokens": "7a546a139cf74f3f", | |
"hash_cont_tokens": "592fca41307607f3" | |
}, | |
"truncated": 0, | |
"non_truncated": 448, | |
"padded": 445, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:salient_translation_error_detection|0": { | |
"hashes": { | |
"hash_examples": "951ac59f7ad0427d", | |
"hash_full_prompts": "7c45b95866240537", | |
"hash_input_tokens": "96196fed80aa266b", | |
"hash_cont_tokens": "f9b8b1dbdec445d9" | |
}, | |
"truncated": 0, | |
"non_truncated": 998, | |
"padded": 998, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:snarks|0": { | |
"hashes": { | |
"hash_examples": "3a53eb9b9d758534", | |
"hash_full_prompts": "713ab9a15534316c", | |
"hash_input_tokens": "766c8278c75f551e", | |
"hash_cont_tokens": "42e74ab03bc6b590" | |
}, | |
"truncated": 0, | |
"non_truncated": 181, | |
"padded": 176, | |
"non_padded": 5, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:sports_understanding|0": { | |
"hashes": { | |
"hash_examples": "bd65741f00770373", | |
"hash_full_prompts": "241b2cf353df5ecf", | |
"hash_input_tokens": "fbdcaa46d1719737", | |
"hash_cont_tokens": "b6abaac1ccbc4b03" | |
}, | |
"truncated": 0, | |
"non_truncated": 1000, | |
"padded": 1000, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:temporal_sequences|0": { | |
"hashes": { | |
"hash_examples": "1d13139f47cb2df7", | |
"hash_full_prompts": "729db25fed2f940e", | |
"hash_input_tokens": "c20215a73e1e9d70", | |
"hash_cont_tokens": "12c05a5e2f876fb8" | |
}, | |
"truncated": 0, | |
"non_truncated": 1000, | |
"padded": 1000, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_five_objects|0": { | |
"hashes": { | |
"hash_examples": "8770a702a9646648", | |
"hash_full_prompts": "74fbca2d47f8529e", | |
"hash_input_tokens": "023f4d795c48e0f1", | |
"hash_cont_tokens": "0178491b6073ecc9" | |
}, | |
"truncated": 0, | |
"non_truncated": 1250, | |
"padded": 1198, | |
"non_padded": 52, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_seven_objects|0": { | |
"hashes": { | |
"hash_examples": "b469b7d073824a59", | |
"hash_full_prompts": "bb13b4b0abdc1636", | |
"hash_input_tokens": "c1f567ff0a9e7b23", | |
"hash_cont_tokens": "4ebb69b0c86b0b9c" | |
}, | |
"truncated": 0, | |
"non_truncated": 1750, | |
"padded": 1749, | |
"non_padded": 1, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|bigbench:tracking_shuffled_objects_three_objects|0": { | |
"hashes": { | |
"hash_examples": "0509e5712ab9bcdb", | |
"hash_full_prompts": "f369763a9ab136d8", | |
"hash_input_tokens": "8f97dcbb5d45daaa", | |
"hash_cont_tokens": "e0d35459e8b5e2fb" | |
}, | |
"truncated": 0, | |
"non_truncated": 300, | |
"padded": 294, | |
"non_padded": 6, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "51a30c4501ba4586", | |
"hash_full_prompts": "a5ca2bdc97de22da", | |
"hash_input_tokens": "8bf997ae621bcf29", | |
"hash_cont_tokens": "532758df212c79d2" | |
}, | |
"truncated": 0, | |
"non_truncated": 13104, | |
"padded": 13036, | |
"non_padded": 68, | |
"num_truncated_few_shots": 0 | |
} | |
} |
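
A note on reading these numbers (not part of the results file itself): "results" stores zero-shot accuracy per BBH subtask, and the "_average" / "all" entries are the unweighted mean over the 18 subtasks. The reported "acc_stderr" values are consistent with the sample standard error of a Bernoulli mean, sqrt(p * (1 - p) / (n - 1)), with n taken from "effective_num_docs"; that formula is inferred from the numbers here, not quoted from lighteval's documentation. A minimal Python sketch cross-checking both, assuming the JSON has been saved locally under a hypothetical path:

import json
import math

# Hypothetical local path; adjust to wherever this JSON is stored.
with open("results_2024-04-16T07-13-03.245435.json") as f:
    data = json.load(f)

accs = []
for key, scores in data["results"].items():
    # Skip the aggregate entries; keep only the individual subtasks.
    if key == "all" or key.endswith(":_average|0"):
        continue
    accs.append(scores["acc"])

    # n comes from the matching config_tasks entry (results keys carry a
    # trailing "|0" few-shot suffix that config_tasks keys do not).
    task_cfg = data["config_tasks"][key.rsplit("|", 1)[0]]
    n = task_cfg["effective_num_docs"]

    # Assumed formula: sample standard error of a Bernoulli mean.
    stderr = math.sqrt(scores["acc"] * (1 - scores["acc"]) / (n - 1))
    print(f"{key}: acc={scores['acc']:.4f}, stderr~{stderr:.6f} "
          f"(reported {scores['acc_stderr']:.6f})")

# Unweighted macro average over the 18 subtasks; should print ~0.460244,
# matching the "_average" and "all" entries above.
print("macro average acc:", sum(accs) / len(accs))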