{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 5101339.683116914,
    "end_time": 5103831.020885923,
    "total_evaluation_time_secondes": "2491.337769009173",
    "model_name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "model_sha": "1e637f2d7cb0a9d6fb1922f305cb784995190a83",
    "model_dtype": "torch.bfloat16",
    "model_size": "87.49 GB",
    "config": null
  },
  "results": {
    "lighteval|agieval:aqua-rat|0": {
      "acc": 0.2992125984251969,
      "acc_stderr": 0.02878875077905148,
      "acc_norm": 0.2874015748031496,
      "acc_norm_stderr": 0.028451603010183638
    },
    "lighteval|agieval:gaokao-biology|0": {
      "acc": 0.4857142857142857,
      "acc_stderr": 0.0345716036894725,
      "acc_norm": 0.49047619047619045,
      "acc_norm_stderr": 0.034579448570031264
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "acc": 0.36231884057971014,
      "acc_stderr": 0.033489883876211865,
      "acc_norm": 0.391304347826087,
      "acc_norm_stderr": 0.034003516037393544
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "acc": 0.4186991869918699,
      "acc_stderr": 0.031518713443921945,
      "acc_norm": 0.4105691056910569,
      "acc_norm_stderr": 0.03142870877522598
    },
    "lighteval|agieval:gaokao-english|0": {
      "acc": 0.7941176470588235,
      "acc_stderr": 0.0231527224394023,
      "acc_norm": 0.7941176470588235,
      "acc_norm_stderr": 0.0231527224394023
    },
    "lighteval|agieval:gaokao-geography|0": {
      "acc": 0.5628140703517588,
      "acc_stderr": 0.0352519354412315,
      "acc_norm": 0.5577889447236181,
      "acc_norm_stderr": 0.03529532245511804
    },
    "lighteval|agieval:gaokao-history|0": {
      "acc": 0.548936170212766,
      "acc_stderr": 0.03252909619613197,
      "acc_norm": 0.5829787234042553,
      "acc_norm_stderr": 0.032232762667117124
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "acc": 0.28774928774928776,
      "acc_stderr": 0.024198561654366728,
      "acc_norm": 0.2792022792022792,
      "acc_norm_stderr": 0.023979060299146253
    },
    "lighteval|agieval:gaokao-physics|0": {
      "acc": 0.44,
      "acc_stderr": 0.035187937631720775,
      "acc_norm": 0.42,
      "acc_norm_stderr": 0.0349874349304872
    },
    "lighteval|agieval:logiqa-en|0": {
      "acc": 0.45314900153609833,
      "acc_stderr": 0.01952532808521508,
      "acc_norm": 0.45314900153609833,
      "acc_norm_stderr": 0.019525328085215076
    },
    "lighteval|agieval:logiqa-zh|0": {
      "acc": 0.4377880184331797,
      "acc_stderr": 0.01945921452085126,
      "acc_norm": 0.4423963133640553,
      "acc_norm_stderr": 0.019481028850019715
    },
    "lighteval|agieval:lsat-ar|0": {
      "acc": 0.21739130434782608,
      "acc_stderr": 0.02725685083881996,
      "acc_norm": 0.21739130434782608,
      "acc_norm_stderr": 0.02725685083881996
    },
    "lighteval|agieval:lsat-lr|0": {
      "acc": 0.5294117647058824,
      "acc_stderr": 0.022123734381431517,
      "acc_norm": 0.5117647058823529,
      "acc_norm_stderr": 0.02215597466931114
    },
    "lighteval|agieval:lsat-rc|0": {
      "acc": 0.6505576208178439,
      "acc_stderr": 0.029124821619700387,
      "acc_norm": 0.6096654275092936,
      "acc_norm_stderr": 0.029798671086842202
    },
    "lighteval|agieval:sat-en|0": {
      "acc": 0.7766990291262136,
      "acc_stderr": 0.02908672040309561,
      "acc_norm": 0.7621359223300971,
      "acc_norm_stderr": 0.029737449348865432
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "acc": 0.49029126213592233,
      "acc_stderr": 0.034914930792419537,
      "acc_norm": 0.4854368932038835,
      "acc_norm_stderr": 0.03490669905098905
    },
    "lighteval|agieval:sat-math|0": {
      "acc": 0.41363636363636364,
      "acc_stderr": 0.03327904178966977,
      "acc_norm": 0.4090909090909091,
      "acc_norm_stderr": 0.03322371499864029
    },
    "lighteval|agieval:_average|0": {
      "acc": 0.4804992030484134,
      "acc_stderr": 0.029027049857806714,
      "acc_norm": 0.4767570170852927,
      "acc_norm_stderr": 0.029070370359576957
    },
    "all": {
      "acc": 0.4804992030484134,
      "acc_stderr": 0.029027049857806714,
      "acc_norm": 0.4767570170852927,
      "acc_norm_stderr": 0.029070370359576957
    }
  },
  "versions": {
    "lighteval|agieval:aqua-rat|0": 0,
    "lighteval|agieval:gaokao-biology|0": 0,
    "lighteval|agieval:gaokao-chemistry|0": 0,
    "lighteval|agieval:gaokao-chinese|0": 0,
    "lighteval|agieval:gaokao-english|0": 0,
    "lighteval|agieval:gaokao-geography|0": 0,
    "lighteval|agieval:gaokao-history|0": 0,
    "lighteval|agieval:gaokao-mathqa|0": 0,
    "lighteval|agieval:gaokao-physics|0": 0,
    "lighteval|agieval:logiqa-en|0": 0,
    "lighteval|agieval:logiqa-zh|0": 0,
    "lighteval|agieval:lsat-ar|0": 0,
    "lighteval|agieval:lsat-lr|0": 0,
    "lighteval|agieval:lsat-rc|0": 0,
    "lighteval|agieval:sat-en|0": 0,
    "lighteval|agieval:sat-en-without-passage|0": 0,
    "lighteval|agieval:sat-math|0": 0
  },
  "config_tasks": {
    "lighteval|agieval:aqua-rat": {
      "name": "agieval:aqua-rat",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-aqua-rat",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 254,
      "effective_num_docs": 254,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-biology": {
      "name": "agieval:gaokao-biology",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-biology",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 210,
      "effective_num_docs": 210,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chemistry": {
      "name": "agieval:gaokao-chemistry",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 207,
      "effective_num_docs": 207,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chinese": {
      "name": "agieval:gaokao-chinese",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chinese",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 246,
      "effective_num_docs": 246,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-english": {
      "name": "agieval:gaokao-english",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-english",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 306,
      "effective_num_docs": 306,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-geography": {
      "name": "agieval:gaokao-geography",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-geography",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 199,
      "effective_num_docs": 199,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-history": {
      "name": "agieval:gaokao-history",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-history",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-mathqa": {
      "name": "agieval:gaokao-mathqa",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 351,
      "effective_num_docs": 351,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-physics": {
      "name": "agieval:gaokao-physics",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-physics",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 200,
      "effective_num_docs": 200,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-en": {
      "name": "agieval:logiqa-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-zh": {
      "name": "agieval:logiqa-zh",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-zh",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-ar": {
      "name": "agieval:lsat-ar",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-ar",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 230,
      "effective_num_docs": 230,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-lr": {
      "name": "agieval:lsat-lr",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-lr",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 510,
      "effective_num_docs": 510,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-rc": {
      "name": "agieval:lsat-rc",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-rc",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 269,
      "effective_num_docs": 269,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en": {
      "name": "agieval:sat-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en-without-passage": {
      "name": "agieval:sat-en-without-passage",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-math": {
      "name": "agieval:sat-math",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-math",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 220,
      "effective_num_docs": 220,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|agieval:aqua-rat|0": {
      "hashes": {
        "hash_examples": "f09607f69e5b7525",
        "hash_full_prompts": "3861b27efd5a6170",
        "hash_input_tokens": "24b3053366cab5d6",
        "hash_cont_tokens": "7c2bcf33ed1e5ae3"
      },
      "truncated": 0,
      "non_truncated": 254,
      "padded": 1270,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-biology|0": {
      "hashes": {
        "hash_examples": "f262eaf4a72db963",
        "hash_full_prompts": "1ef14dcf5933ff33",
        "hash_input_tokens": "3553b0507d1c6868",
        "hash_cont_tokens": "b3febf85776696e1"
      },
      "truncated": 0,
      "non_truncated": 210,
      "padded": 840,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "hashes": {
        "hash_examples": "47f2e649f58d9da5",
        "hash_full_prompts": "324a4d07d3b7de40",
        "hash_input_tokens": "62a14b0cfb722cf4",
        "hash_cont_tokens": "711fb41221b6515e"
      },
      "truncated": 0,
      "non_truncated": 207,
      "padded": 830,
      "non_padded": 1,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "hashes": {
        "hash_examples": "1010b21fde4726ab",
        "hash_full_prompts": "938611fd6ecdbf4e",
        "hash_input_tokens": "19eb2367f1cb72de",
        "hash_cont_tokens": "393b1820b8f4534f"
      },
      "truncated": 0,
      "non_truncated": 246,
      "padded": 981,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-english|0": {
      "hashes": {
        "hash_examples": "4864e492a350ae93",
        "hash_full_prompts": "17369402cc020876",
        "hash_input_tokens": "066e65f806b474dd",
        "hash_cont_tokens": "dd72ceb1d7224598"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1222,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-geography|0": {
      "hashes": {
        "hash_examples": "ec3a021e37650e7d",
        "hash_full_prompts": "79c20e1e52638229",
        "hash_input_tokens": "2e676a0441f390c7",
        "hash_cont_tokens": "e06462bcee629ea8"
      },
      "truncated": 0,
      "non_truncated": 199,
      "padded": 794,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-history|0": {
      "hashes": {
        "hash_examples": "b3fad1596f1ae1f9",
        "hash_full_prompts": "541c270a0bbab3fd",
        "hash_input_tokens": "cf7e8a42c7bdf5cd",
        "hash_cont_tokens": "87cce58a4ec6cfd8"
      },
      "truncated": 0,
      "non_truncated": 235,
      "padded": 935,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "hashes": {
        "hash_examples": "1d1088556861b0b0",
        "hash_full_prompts": "6002200193304ad6",
        "hash_input_tokens": "a2cfd284204fffcc",
        "hash_cont_tokens": "771402d59229cbae"
      },
      "truncated": 0,
      "non_truncated": 351,
      "padded": 1401,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-physics|0": {
      "hashes": {
        "hash_examples": "eb05f035c7bfca2f",
        "hash_full_prompts": "7b606fe0e48d1136",
        "hash_input_tokens": "798a188685f3c7ba",
        "hash_cont_tokens": "f1574dddbe4231e1"
      },
      "truncated": 0,
      "non_truncated": 200,
      "padded": 797,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-en|0": {
      "hashes": {
        "hash_examples": "0a688a45f69c21e0",
        "hash_full_prompts": "70df665a01ea64f7",
        "hash_input_tokens": "4ef64d022f86d868",
        "hash_cont_tokens": "106c1564fdd5ff8e"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2597,
      "non_padded": 7,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-zh|0": {
      "hashes": {
        "hash_examples": "620d6888b6012ea5",
        "hash_full_prompts": "11b751123ca8b711",
        "hash_input_tokens": "d30860e1a31f27b3",
        "hash_cont_tokens": "22d3640f03c44bb0"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2581,
      "non_padded": 23,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-ar|0": {
      "hashes": {
        "hash_examples": "627c8f5ccd5da209",
        "hash_full_prompts": "254286f37d4a76ce",
        "hash_input_tokens": "dc24a391ad28e575",
        "hash_cont_tokens": "9e13af9ad8f5f78e"
      },
      "truncated": 0,
      "non_truncated": 230,
      "padded": 1140,
      "non_padded": 10,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-lr|0": {
      "hashes": {
        "hash_examples": "794641c86de172f5",
        "hash_full_prompts": "cf1badbb8c42f6a8",
        "hash_input_tokens": "2252f2846ec8cecf",
        "hash_cont_tokens": "86333847359ceee5"
      },
      "truncated": 0,
      "non_truncated": 510,
      "padded": 2525,
      "non_padded": 25,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-rc|0": {
      "hashes": {
        "hash_examples": "35981ed917ea01cf",
        "hash_full_prompts": "5fbc2eae9474469b",
        "hash_input_tokens": "768fcfff60a6a668",
        "hash_cont_tokens": "cb47c6b984067525"
      },
      "truncated": 0,
      "non_truncated": 269,
      "padded": 1345,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en|0": {
      "hashes": {
        "hash_examples": "041c39c646536a1e",
        "hash_full_prompts": "171c266251542e36",
        "hash_input_tokens": "c5342c28aac7b5e9",
        "hash_cont_tokens": "cb01422bf828aefe"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 821,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "hashes": {
        "hash_examples": "e4d9284367dff68f",
        "hash_full_prompts": "66bc76f2704c6ca7",
        "hash_input_tokens": "582f986b74383001",
        "hash_cont_tokens": "cb01422bf828aefe"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 817,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-math|0": {
      "hashes": {
        "hash_examples": "01db7291603fc1a0",
        "hash_full_prompts": "6016bbecd8642dbe",
        "hash_input_tokens": "0f1ec1ab676fb3e4",
        "hash_cont_tokens": "0c4980b69a75cb83"
      },
      "truncated": 0,
      "non_truncated": 220,
      "padded": 876,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "da3af66181f18ddf",
      "hash_full_prompts": "e341944cee05878c",
      "hash_input_tokens": "5f2600d57bfbb6aa",
      "hash_cont_tokens": "7f70e41f6e93ac40"
    },
    "truncated": 0,
    "non_truncated": 5151,
    "padded": 21772,
    "non_padded": 92,
    "num_truncated_few_shots": 0
  }
}