open-r1-eval-leaderboard/eval_results/deepseek-ai/deepseek-llm-67b-chat/main/agieval/results_2024-03-28T17-11-01.242076.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 2077820.830040582,
    "end_time": 2080750.614670217,
    "total_evaluation_time_secondes": "2929.784629635047",
    "model_name": "deepseek-ai/deepseek-llm-67b-chat",
    "model_sha": "79648bef7658bb824e4630740f6e1484c1b0620b",
    "model_dtype": "torch.bfloat16",
    "model_size": "125.77 GB",
    "config": null
  },
"results": { | |
"lighteval|agieval:aqua-rat|0": { | |
"acc": 0.3188976377952756, | |
"acc_stderr": 0.029300267397764652, | |
"acc_norm": 0.2874015748031496, | |
"acc_norm_stderr": 0.028451603010183634 | |
}, | |
"lighteval|agieval:gaokao-biology|0": { | |
"acc": 0.5857142857142857, | |
"acc_stderr": 0.034073735217849545, | |
"acc_norm": 0.5428571428571428, | |
"acc_norm_stderr": 0.034458439380315835 | |
}, | |
"lighteval|agieval:gaokao-chemistry|0": { | |
"acc": 0.4251207729468599, | |
"acc_stderr": 0.034443784322092386, | |
"acc_norm": 0.34782608695652173, | |
"acc_norm_stderr": 0.03318403378139901 | |
}, | |
"lighteval|agieval:gaokao-chinese|0": { | |
"acc": 0.5975609756097561, | |
"acc_stderr": 0.03132983555771989, | |
"acc_norm": 0.5487804878048781, | |
"acc_norm_stderr": 0.031791441795983734 | |
}, | |
"lighteval|agieval:gaokao-english|0": { | |
"acc": 0.696078431372549, | |
"acc_stderr": 0.026336613469046633, | |
"acc_norm": 0.6633986928104575, | |
"acc_norm_stderr": 0.027057974624494382 | |
}, | |
"lighteval|agieval:gaokao-geography|0": { | |
"acc": 0.7336683417085427, | |
"acc_stderr": 0.03141439434617944, | |
"acc_norm": 0.7085427135678392, | |
"acc_norm_stderr": 0.03229519279811605 | |
}, | |
"lighteval|agieval:gaokao-history|0": { | |
"acc": 0.7574468085106383, | |
"acc_stderr": 0.02802022627120022, | |
"acc_norm": 0.6638297872340425, | |
"acc_norm_stderr": 0.030881618520676942 | |
}, | |
"lighteval|agieval:gaokao-mathqa|0": { | |
"acc": 0.2934472934472934, | |
"acc_stderr": 0.02433903269681092, | |
"acc_norm": 0.2962962962962963, | |
"acc_norm_stderr": 0.024407539882901112 | |
}, | |
"lighteval|agieval:gaokao-physics|0": { | |
"acc": 0.56, | |
"acc_stderr": 0.03518793763172071, | |
"acc_norm": 0.47, | |
"acc_norm_stderr": 0.03538020341900046 | |
}, | |
"lighteval|agieval:logiqa-en|0": { | |
"acc": 0.43010752688172044, | |
"acc_stderr": 0.01941906447090713, | |
"acc_norm": 0.4039938556067588, | |
"acc_norm_stderr": 0.019246690834000654 | |
}, | |
"lighteval|agieval:logiqa-zh|0": { | |
"acc": 0.5422427035330261, | |
"acc_stderr": 0.019541496438967652, | |
"acc_norm": 0.445468509984639, | |
"acc_norm_stderr": 0.01949462713343998 | |
}, | |
"lighteval|agieval:lsat-ar|0": { | |
"acc": 0.25217391304347825, | |
"acc_stderr": 0.028696745294493377, | |
"acc_norm": 0.21739130434782608, | |
"acc_norm_stderr": 0.02725685083881996 | |
}, | |
"lighteval|agieval:lsat-lr|0": { | |
"acc": 0.5705882352941176, | |
"acc_stderr": 0.02194014455513715, | |
"acc_norm": 0.44901960784313727, | |
"acc_norm_stderr": 0.022046610724356357 | |
}, | |
"lighteval|agieval:lsat-rc|0": { | |
"acc": 0.6356877323420075, | |
"acc_stderr": 0.029396215063241374, | |
"acc_norm": 0.4758364312267658, | |
"acc_norm_stderr": 0.030506674211283072 | |
}, | |
"lighteval|agieval:sat-en|0": { | |
"acc": 0.7912621359223301, | |
"acc_stderr": 0.028384671935185526, | |
"acc_norm": 0.6941747572815534, | |
"acc_norm_stderr": 0.032180600400244896 | |
}, | |
"lighteval|agieval:sat-en-without-passage|0": { | |
"acc": 0.4854368932038835, | |
"acc_stderr": 0.03490669905098905, | |
"acc_norm": 0.3155339805825243, | |
"acc_norm_stderr": 0.03245802450146824 | |
}, | |
"lighteval|agieval:sat-math|0": { | |
"acc": 0.42272727272727273, | |
"acc_stderr": 0.033380942640935336, | |
"acc_norm": 0.35, | |
"acc_norm_stderr": 0.032230618755899304 | |
}, | |
"lighteval|agieval:_average|0": { | |
"acc": 0.5351859388266493, | |
"acc_stderr": 0.02883010625648476, | |
"acc_norm": 0.4635500723060901, | |
"acc_norm_stderr": 0.029019337918387268 | |
}, | |
"all": { | |
"acc": 0.5351859388266493, | |
"acc_stderr": 0.02883010625648476, | |
"acc_norm": 0.4635500723060901, | |
"acc_norm_stderr": 0.029019337918387268 | |
} | |
}, | |
"versions": { | |
"lighteval|agieval:aqua-rat|0": 0, | |
"lighteval|agieval:gaokao-biology|0": 0, | |
"lighteval|agieval:gaokao-chemistry|0": 0, | |
"lighteval|agieval:gaokao-chinese|0": 0, | |
"lighteval|agieval:gaokao-english|0": 0, | |
"lighteval|agieval:gaokao-geography|0": 0, | |
"lighteval|agieval:gaokao-history|0": 0, | |
"lighteval|agieval:gaokao-mathqa|0": 0, | |
"lighteval|agieval:gaokao-physics|0": 0, | |
"lighteval|agieval:logiqa-en|0": 0, | |
"lighteval|agieval:logiqa-zh|0": 0, | |
"lighteval|agieval:lsat-ar|0": 0, | |
"lighteval|agieval:lsat-lr|0": 0, | |
"lighteval|agieval:lsat-rc|0": 0, | |
"lighteval|agieval:sat-en|0": 0, | |
"lighteval|agieval:sat-en-without-passage|0": 0, | |
"lighteval|agieval:sat-math|0": 0 | |
}, | |
"config_tasks": { | |
"lighteval|agieval:aqua-rat": { | |
"name": "agieval:aqua-rat", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-aqua-rat", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 254, | |
"effective_num_docs": 254, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-biology": { | |
"name": "agieval:gaokao-biology", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-biology", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 210, | |
"effective_num_docs": 210, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-chemistry": { | |
"name": "agieval:gaokao-chemistry", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-chemistry", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 207, | |
"effective_num_docs": 207, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-chinese": { | |
"name": "agieval:gaokao-chinese", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-chinese", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 246, | |
"effective_num_docs": 246, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-english": { | |
"name": "agieval:gaokao-english", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-english", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 306, | |
"effective_num_docs": 306, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-geography": { | |
"name": "agieval:gaokao-geography", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-geography", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 199, | |
"effective_num_docs": 199, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-history": { | |
"name": "agieval:gaokao-history", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-history", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 235, | |
"effective_num_docs": 235, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-mathqa": { | |
"name": "agieval:gaokao-mathqa", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-mathqa", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 351, | |
"effective_num_docs": 351, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:gaokao-physics": { | |
"name": "agieval:gaokao-physics", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-gaokao-physics", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 200, | |
"effective_num_docs": 200, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:logiqa-en": { | |
"name": "agieval:logiqa-en", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-logiqa-en", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 651, | |
"effective_num_docs": 651, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:logiqa-zh": { | |
"name": "agieval:logiqa-zh", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-logiqa-zh", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 651, | |
"effective_num_docs": 651, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:lsat-ar": { | |
"name": "agieval:lsat-ar", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-lsat-ar", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 230, | |
"effective_num_docs": 230, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:lsat-lr": { | |
"name": "agieval:lsat-lr", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-lsat-lr", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 510, | |
"effective_num_docs": 510, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:lsat-rc": { | |
"name": "agieval:lsat-rc", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-lsat-rc", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 269, | |
"effective_num_docs": 269, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:sat-en": { | |
"name": "agieval:sat-en", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-sat-en", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 206, | |
"effective_num_docs": 206, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:sat-en-without-passage": { | |
"name": "agieval:sat-en-without-passage", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-sat-en-without-passage", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 206, | |
"effective_num_docs": 206, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
}, | |
"lighteval|agieval:sat-math": { | |
"name": "agieval:sat-math", | |
"prompt_function": "agieval", | |
"hf_repo": "dmayhem93/agieval-sat-math", | |
"hf_subset": "default", | |
"metric": [ | |
"loglikelihood_acc", | |
"loglikelihood_acc_norm_nospace" | |
], | |
"hf_avail_splits": [ | |
"test" | |
], | |
"evaluation_splits": [ | |
"test" | |
], | |
"few_shots_split": null, | |
"few_shots_select": "random_sampling", | |
"generation_size": 1, | |
"stop_sequence": null, | |
"output_regex": null, | |
"frozen": false, | |
"suite": [ | |
"lighteval" | |
], | |
"original_num_docs": 220, | |
"effective_num_docs": 220, | |
"trust_dataset": true, | |
"must_remove_duplicate_docs": null | |
} | |
}, | |
"summary_tasks": { | |
"lighteval|agieval:aqua-rat|0": { | |
"hashes": { | |
"hash_examples": "f09607f69e5b7525", | |
"hash_full_prompts": "0e5e32a970e390ea", | |
"hash_input_tokens": "77a739a63819e008", | |
"hash_cont_tokens": "2cad1f93cd377428" | |
}, | |
"truncated": 0, | |
"non_truncated": 254, | |
"padded": 1270, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-biology|0": { | |
"hashes": { | |
"hash_examples": "f262eaf4a72db963", | |
"hash_full_prompts": "229fc17c14965d1c", | |
"hash_input_tokens": "abdc458e17f2ba7a", | |
"hash_cont_tokens": "decb1b6a4f846fdb" | |
}, | |
"truncated": 0, | |
"non_truncated": 210, | |
"padded": 833, | |
"non_padded": 7, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-chemistry|0": { | |
"hashes": { | |
"hash_examples": "47f2e649f58d9da5", | |
"hash_full_prompts": "a1662423763cd50a", | |
"hash_input_tokens": "b30dc42a725e0381", | |
"hash_cont_tokens": "55034b0104261709" | |
}, | |
"truncated": 0, | |
"non_truncated": 207, | |
"padded": 826, | |
"non_padded": 5, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-chinese|0": { | |
"hashes": { | |
"hash_examples": "1010b21fde4726ab", | |
"hash_full_prompts": "849f250b06540d96", | |
"hash_input_tokens": "efa18c51ae25e7dc", | |
"hash_cont_tokens": "ed6245bf0174f812" | |
}, | |
"truncated": 0, | |
"non_truncated": 246, | |
"padded": 981, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-english|0": { | |
"hashes": { | |
"hash_examples": "4864e492a350ae93", | |
"hash_full_prompts": "8ae18cea7c33973f", | |
"hash_input_tokens": "ccc2862769a768f0", | |
"hash_cont_tokens": "ef0d21bb5e92ad27" | |
}, | |
"truncated": 0, | |
"non_truncated": 306, | |
"padded": 1224, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-geography|0": { | |
"hashes": { | |
"hash_examples": "ec3a021e37650e7d", | |
"hash_full_prompts": "9281a6eef7c0ff74", | |
"hash_input_tokens": "3d03a6deb16b00ae", | |
"hash_cont_tokens": "a3f1aa75c1c0f518" | |
}, | |
"truncated": 0, | |
"non_truncated": 199, | |
"padded": 791, | |
"non_padded": 5, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-history|0": { | |
"hashes": { | |
"hash_examples": "b3fad1596f1ae1f9", | |
"hash_full_prompts": "d6df287dca759a9e", | |
"hash_input_tokens": "af0c96864f4164fa", | |
"hash_cont_tokens": "24b37966ac3f0820" | |
}, | |
"truncated": 0, | |
"non_truncated": 235, | |
"padded": 930, | |
"non_padded": 10, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-mathqa|0": { | |
"hashes": { | |
"hash_examples": "1d1088556861b0b0", | |
"hash_full_prompts": "849410cb9a4101a4", | |
"hash_input_tokens": "37500c8ac391ff93", | |
"hash_cont_tokens": "3fd46a253d4f64f0" | |
}, | |
"truncated": 0, | |
"non_truncated": 351, | |
"padded": 1403, | |
"non_padded": 1, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:gaokao-physics|0": { | |
"hashes": { | |
"hash_examples": "eb05f035c7bfca2f", | |
"hash_full_prompts": "ec6493a6dd766f44", | |
"hash_input_tokens": "9aa42e2ae8a705a3", | |
"hash_cont_tokens": "38fcd2b335c3fb40" | |
}, | |
"truncated": 0, | |
"non_truncated": 200, | |
"padded": 797, | |
"non_padded": 3, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:logiqa-en|0": { | |
"hashes": { | |
"hash_examples": "0a688a45f69c21e0", | |
"hash_full_prompts": "7fe2e6fedb447416", | |
"hash_input_tokens": "1f3460a64b7f60f1", | |
"hash_cont_tokens": "8bc0af2414f69ab8" | |
}, | |
"truncated": 0, | |
"non_truncated": 651, | |
"padded": 2602, | |
"non_padded": 2, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:logiqa-zh|0": { | |
"hashes": { | |
"hash_examples": "620d6888b6012ea5", | |
"hash_full_prompts": "df88541b270818dc", | |
"hash_input_tokens": "2927a5683d83a326", | |
"hash_cont_tokens": "74ef8ea01dcec4c7" | |
}, | |
"truncated": 0, | |
"non_truncated": 651, | |
"padded": 2592, | |
"non_padded": 12, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:lsat-ar|0": { | |
"hashes": { | |
"hash_examples": "627c8f5ccd5da209", | |
"hash_full_prompts": "c65582c7eb7e71b3", | |
"hash_input_tokens": "4c412bfd912649cb", | |
"hash_cont_tokens": "dec54f05f564f9d4" | |
}, | |
"truncated": 0, | |
"non_truncated": 230, | |
"padded": 1149, | |
"non_padded": 1, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:lsat-lr|0": { | |
"hashes": { | |
"hash_examples": "794641c86de172f5", | |
"hash_full_prompts": "033f31969f6927c6", | |
"hash_input_tokens": "df058f1fd4548e45", | |
"hash_cont_tokens": "9c4ce131b925f840" | |
}, | |
"truncated": 0, | |
"non_truncated": 510, | |
"padded": 2539, | |
"non_padded": 11, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:lsat-rc|0": { | |
"hashes": { | |
"hash_examples": "35981ed917ea01cf", | |
"hash_full_prompts": "594ea1493ac00038", | |
"hash_input_tokens": "3dbe9d992702e035", | |
"hash_cont_tokens": "ee975d9e9167fa6f" | |
}, | |
"truncated": 0, | |
"non_truncated": 269, | |
"padded": 1345, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:sat-en|0": { | |
"hashes": { | |
"hash_examples": "041c39c646536a1e", | |
"hash_full_prompts": "6459686d0db60d98", | |
"hash_input_tokens": "2601f7afb0f6edda", | |
"hash_cont_tokens": "87912bae8bee15a6" | |
}, | |
"truncated": 0, | |
"non_truncated": 206, | |
"padded": 821, | |
"non_padded": 0, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:sat-en-without-passage|0": { | |
"hashes": { | |
"hash_examples": "e4d9284367dff68f", | |
"hash_full_prompts": "3111aa1c817d5f73", | |
"hash_input_tokens": "f2bed14fba7f44a1", | |
"hash_cont_tokens": "87912bae8bee15a6" | |
}, | |
"truncated": 0, | |
"non_truncated": 206, | |
"padded": 812, | |
"non_padded": 9, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
}, | |
"lighteval|agieval:sat-math|0": { | |
"hashes": { | |
"hash_examples": "01db7291603fc1a0", | |
"hash_full_prompts": "854368f14f99baee", | |
"hash_input_tokens": "0242f0472c553cfc", | |
"hash_cont_tokens": "d54c758fcae21262" | |
}, | |
"truncated": 0, | |
"non_truncated": 220, | |
"padded": 872, | |
"non_padded": 8, | |
"effective_few_shots": 0.0, | |
"num_truncated_few_shots": 0 | |
} | |
}, | |
"summary_general": { | |
"hashes": { | |
"hash_examples": "da3af66181f18ddf", | |
"hash_full_prompts": "0399ef5bbb34ed90", | |
"hash_input_tokens": "77cd14086f902b7e", | |
"hash_cont_tokens": "11392dba3393fe7f" | |
}, | |
"truncated": 0, | |
"non_truncated": 5151, | |
"padded": 21787, | |
"non_padded": 77, | |
"num_truncated_few_shots": 0 | |
} | |
} |
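
For reference, the `lighteval|agieval:_average|0` entry matches the unweighted mean of the 17 per-subtask `acc` values above. A minimal sketch of checking that with the standard library; the local file name is an assumption, not part of the results file:

```python
import json
from statistics import mean

# Assumed local copy of this results file.
with open("results_2024-03-28T17-11-01.242076.json") as f:
    data = json.load(f)

results = data["results"]

# Collect the per-subtask accuracies, skipping the aggregate entries.
subtask_accs = [
    v["acc"]
    for k, v in results.items()
    if k.startswith("lighteval|agieval:") and "_average" not in k
]

recomputed = mean(subtask_accs)  # unweighted mean over the 17 subtasks
stored = results["lighteval|agieval:_average|0"]["acc"]
print(f"recomputed: {recomputed:.6f}  stored: {stored:.6f}")
assert abs(recomputed - stored) < 1e-6
```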