open-r1-eval-leaderboard
/
eval_results
/openchat
/openchat-3.5-0106
/main
/agieval
/results_2024-03-28T16-28-08.688920.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1616328.065043648,
    "end_time": 1616595.808001383,
    "total_evaluation_time_secondes": "267.7429577349685",
    "model_name": "openchat/openchat-3.5-0106",
    "model_sha": "9619fb7d2a8e25fa6b0633c0f57f7f4aa79b45c4",
    "model_dtype": "torch.bfloat16",
    "model_size": "13.61 GB",
    "config": null
  },
  "results": {
    "lighteval|agieval:aqua-rat|0": {
      "acc": 0.2795275590551181,
      "acc_stderr": 0.02821374533845074,
      "acc_norm": 0.2795275590551181,
      "acc_norm_stderr": 0.02821374533845074
    },
    "lighteval|agieval:gaokao-biology|0": {
      "acc": 0.4,
      "acc_stderr": 0.03388694968349424,
      "acc_norm": 0.40476190476190477,
      "acc_norm_stderr": 0.03395252139627751
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "acc": 0.2898550724637681,
      "acc_stderr": 0.03161042514455282,
      "acc_norm": 0.30917874396135264,
      "acc_norm_stderr": 0.03219986494000449
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "acc": 0.3252032520325203,
      "acc_stderr": 0.029928220038850473,
      "acc_norm": 0.3333333333333333,
      "acc_norm_stderr": 0.030116930096841722
    },
    "lighteval|agieval:gaokao-english|0": {
      "acc": 0.6895424836601307,
      "acc_stderr": 0.02649303322514589,
      "acc_norm": 0.6797385620915033,
      "acc_norm_stderr": 0.02671611838015684
    },
    "lighteval|agieval:gaokao-geography|0": {
      "acc": 0.49748743718592964,
      "acc_stderr": 0.03553300407972604,
      "acc_norm": 0.49748743718592964,
      "acc_norm_stderr": 0.03553300407972604
    },
    "lighteval|agieval:gaokao-history|0": {
      "acc": 0.46808510638297873,
      "acc_stderr": 0.03261936918467382,
      "acc_norm": 0.4723404255319149,
      "acc_norm_stderr": 0.03263597118409769
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "acc": 0.2962962962962963,
      "acc_stderr": 0.02440753988290111,
      "acc_norm": 0.2905982905982906,
      "acc_norm_stderr": 0.02426937659448
    },
    "lighteval|agieval:gaokao-physics|0": {
      "acc": 0.37,
      "acc_stderr": 0.03422508997679329,
      "acc_norm": 0.39,
      "acc_norm_stderr": 0.03457567623250012
    },
    "lighteval|agieval:logiqa-en|0": {
      "acc": 0.38402457757296465,
      "acc_stderr": 0.019076755948732343,
      "acc_norm": 0.3763440860215054,
      "acc_norm_stderr": 0.019002397214689486
    },
    "lighteval|agieval:logiqa-zh|0": {
      "acc": 0.37327188940092165,
      "acc_stderr": 0.018971232715472068,
      "acc_norm": 0.3763440860215054,
      "acc_norm_stderr": 0.01900239721468949
    },
    "lighteval|agieval:lsat-ar|0": {
      "acc": 0.18695652173913044,
      "acc_stderr": 0.025763772398512335,
      "acc_norm": 0.1782608695652174,
      "acc_norm_stderr": 0.025291655246273914
    },
    "lighteval|agieval:lsat-lr|0": {
      "acc": 0.5352941176470588,
      "acc_stderr": 0.02210682784673137,
      "acc_norm": 0.515686274509804,
      "acc_norm_stderr": 0.022151201291314516
    },
    "lighteval|agieval:lsat-rc|0": {
      "acc": 0.5799256505576208,
      "acc_stderr": 0.030149620080212002,
      "acc_norm": 0.5390334572490706,
      "acc_norm_stderr": 0.030449149512372407
    },
    "lighteval|agieval:sat-en|0": {
      "acc": 0.7961165048543689,
      "acc_stderr": 0.028138595623668775,
      "acc_norm": 0.7718446601941747,
      "acc_norm_stderr": 0.029309157873241714
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "acc": 0.4320388349514563,
      "acc_stderr": 0.0345974255383149,
      "acc_norm": 0.41262135922330095,
      "acc_norm_stderr": 0.03438412659410013
    },
    "lighteval|agieval:sat-math|0": {
      "acc": 0.35,
      "acc_stderr": 0.032230618755899304,
      "acc_norm": 0.3,
      "acc_norm_stderr": 0.03096617686426667
    },
    "lighteval|agieval:_average|0": {
      "acc": 0.4266838414000154,
      "acc_stderr": 0.028703072086007732,
      "acc_norm": 0.4192412381943485,
      "acc_norm_stderr": 0.028751145297263737
    },
    "all": {
      "acc": 0.4266838414000154,
      "acc_stderr": 0.028703072086007732,
      "acc_norm": 0.4192412381943485,
      "acc_norm_stderr": 0.028751145297263737
    }
  },
  "versions": {
    "lighteval|agieval:aqua-rat|0": 0,
    "lighteval|agieval:gaokao-biology|0": 0,
    "lighteval|agieval:gaokao-chemistry|0": 0,
    "lighteval|agieval:gaokao-chinese|0": 0,
    "lighteval|agieval:gaokao-english|0": 0,
    "lighteval|agieval:gaokao-geography|0": 0,
    "lighteval|agieval:gaokao-history|0": 0,
    "lighteval|agieval:gaokao-mathqa|0": 0,
    "lighteval|agieval:gaokao-physics|0": 0,
    "lighteval|agieval:logiqa-en|0": 0,
    "lighteval|agieval:logiqa-zh|0": 0,
    "lighteval|agieval:lsat-ar|0": 0,
    "lighteval|agieval:lsat-lr|0": 0,
    "lighteval|agieval:lsat-rc|0": 0,
    "lighteval|agieval:sat-en|0": 0,
    "lighteval|agieval:sat-en-without-passage|0": 0,
    "lighteval|agieval:sat-math|0": 0
  },
  "config_tasks": {
    "lighteval|agieval:aqua-rat": {
      "name": "agieval:aqua-rat",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-aqua-rat",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 254,
      "effective_num_docs": 254,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-biology": {
      "name": "agieval:gaokao-biology",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-biology",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 210,
      "effective_num_docs": 210,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chemistry": {
      "name": "agieval:gaokao-chemistry",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chemistry",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 207,
      "effective_num_docs": 207,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-chinese": {
      "name": "agieval:gaokao-chinese",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-chinese",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 246,
      "effective_num_docs": 246,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-english": {
      "name": "agieval:gaokao-english",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-english",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 306,
      "effective_num_docs": 306,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-geography": {
      "name": "agieval:gaokao-geography",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-geography",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 199,
      "effective_num_docs": 199,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-history": {
      "name": "agieval:gaokao-history",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-history",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 235,
      "effective_num_docs": 235,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-mathqa": {
      "name": "agieval:gaokao-mathqa",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-mathqa",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 351,
      "effective_num_docs": 351,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:gaokao-physics": {
      "name": "agieval:gaokao-physics",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-gaokao-physics",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 200,
      "effective_num_docs": 200,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-en": {
      "name": "agieval:logiqa-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:logiqa-zh": {
      "name": "agieval:logiqa-zh",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-logiqa-zh",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 651,
      "effective_num_docs": 651,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-ar": {
      "name": "agieval:lsat-ar",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-ar",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 230,
      "effective_num_docs": 230,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-lr": {
      "name": "agieval:lsat-lr",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-lr",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 510,
      "effective_num_docs": 510,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:lsat-rc": {
      "name": "agieval:lsat-rc",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-lsat-rc",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 269,
      "effective_num_docs": 269,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en": {
      "name": "agieval:sat-en",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-en-without-passage": {
      "name": "agieval:sat-en-without-passage",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-en-without-passage",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 206,
      "effective_num_docs": 206,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    },
    "lighteval|agieval:sat-math": {
      "name": "agieval:sat-math",
      "prompt_function": "agieval",
      "hf_repo": "dmayhem93/agieval-sat-math",
      "hf_subset": "default",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": 1,
      "stop_sequence": null,
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 220,
      "effective_num_docs": 220,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|agieval:aqua-rat|0": {
      "hashes": {
        "hash_examples": "f09607f69e5b7525",
        "hash_full_prompts": "c885824d200848ff",
        "hash_input_tokens": "bbc9473dbbe6f16c",
        "hash_cont_tokens": "7c2bcf33ed1e5ae3"
      },
      "truncated": 0,
      "non_truncated": 254,
      "padded": 1270,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-biology|0": {
      "hashes": {
        "hash_examples": "f262eaf4a72db963",
        "hash_full_prompts": "a57b68a06f7c5dad",
        "hash_input_tokens": "ca1fe44a4eaeed8d",
        "hash_cont_tokens": "b3febf85776696e1"
      },
      "truncated": 0,
      "non_truncated": 210,
      "padded": 840,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chemistry|0": {
      "hashes": {
        "hash_examples": "47f2e649f58d9da5",
        "hash_full_prompts": "77b311499beca56b",
        "hash_input_tokens": "43896e11a60e81d3",
        "hash_cont_tokens": "711fb41221b6515e"
      },
      "truncated": 0,
      "non_truncated": 207,
      "padded": 830,
      "non_padded": 1,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-chinese|0": {
      "hashes": {
        "hash_examples": "1010b21fde4726ab",
        "hash_full_prompts": "765b83dc3fe58e0e",
        "hash_input_tokens": "c2e6ac114faa28b7",
        "hash_cont_tokens": "393b1820b8f4534f"
      },
      "truncated": 0,
      "non_truncated": 246,
      "padded": 981,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-english|0": {
      "hashes": {
        "hash_examples": "4864e492a350ae93",
        "hash_full_prompts": "2d90914067cbddb2",
        "hash_input_tokens": "19ee1ddb098e0782",
        "hash_cont_tokens": "dd72ceb1d7224598"
      },
      "truncated": 0,
      "non_truncated": 306,
      "padded": 1222,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-geography|0": {
      "hashes": {
        "hash_examples": "ec3a021e37650e7d",
        "hash_full_prompts": "26abf6b8b06b03fd",
        "hash_input_tokens": "9c63079da9deb525",
        "hash_cont_tokens": "e06462bcee629ea8"
      },
      "truncated": 0,
      "non_truncated": 199,
      "padded": 794,
      "non_padded": 2,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-history|0": {
      "hashes": {
        "hash_examples": "b3fad1596f1ae1f9",
        "hash_full_prompts": "d22e901442960499",
        "hash_input_tokens": "cd1d7e17c8e3016d",
        "hash_cont_tokens": "87cce58a4ec6cfd8"
      },
      "truncated": 0,
      "non_truncated": 235,
      "padded": 935,
      "non_padded": 5,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-mathqa|0": {
      "hashes": {
        "hash_examples": "1d1088556861b0b0",
        "hash_full_prompts": "8f130ed7923a2e96",
        "hash_input_tokens": "d4e27a91afe699a4",
        "hash_cont_tokens": "771402d59229cbae"
      },
      "truncated": 0,
      "non_truncated": 351,
      "padded": 1401,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:gaokao-physics|0": {
      "hashes": {
        "hash_examples": "eb05f035c7bfca2f",
        "hash_full_prompts": "04c26f6aa4512fa0",
        "hash_input_tokens": "c571bd43f31938ce",
        "hash_cont_tokens": "f1574dddbe4231e1"
      },
      "truncated": 0,
      "non_truncated": 200,
      "padded": 797,
      "non_padded": 3,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-en|0": {
      "hashes": {
        "hash_examples": "0a688a45f69c21e0",
        "hash_full_prompts": "1e7fa6f1e62bea8b",
        "hash_input_tokens": "c349474caa787f2c",
        "hash_cont_tokens": "106c1564fdd5ff8e"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2597,
      "non_padded": 7,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:logiqa-zh|0": {
      "hashes": {
        "hash_examples": "620d6888b6012ea5",
        "hash_full_prompts": "f5e4b12f2be2730a",
        "hash_input_tokens": "7443875e78dae670",
        "hash_cont_tokens": "22d3640f03c44bb0"
      },
      "truncated": 0,
      "non_truncated": 651,
      "padded": 2581,
      "non_padded": 23,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-ar|0": {
      "hashes": {
        "hash_examples": "627c8f5ccd5da209",
        "hash_full_prompts": "93f01bfefd21f036",
        "hash_input_tokens": "cd6e0cee27c3c3e4",
        "hash_cont_tokens": "9e13af9ad8f5f78e"
      },
      "truncated": 0,
      "non_truncated": 230,
      "padded": 1140,
      "non_padded": 10,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-lr|0": {
      "hashes": {
        "hash_examples": "794641c86de172f5",
        "hash_full_prompts": "8e81a99c7521258c",
        "hash_input_tokens": "09e76ba415ce4540",
        "hash_cont_tokens": "86333847359ceee5"
      },
      "truncated": 0,
      "non_truncated": 510,
      "padded": 2525,
      "non_padded": 25,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:lsat-rc|0": {
      "hashes": {
        "hash_examples": "35981ed917ea01cf",
        "hash_full_prompts": "b6e8d1adf72e9fb4",
        "hash_input_tokens": "78bda8444d556179",
        "hash_cont_tokens": "cb47c6b984067525"
      },
      "truncated": 0,
      "non_truncated": 269,
      "padded": 1345,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en|0": {
      "hashes": {
        "hash_examples": "041c39c646536a1e",
        "hash_full_prompts": "b9081e0a4bf5d97f",
        "hash_input_tokens": "16dc5e2c7544b9f1",
        "hash_cont_tokens": "cb01422bf828aefe"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 821,
      "non_padded": 0,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-en-without-passage|0": {
      "hashes": {
        "hash_examples": "e4d9284367dff68f",
        "hash_full_prompts": "f1362aa3ba355eaa",
        "hash_input_tokens": "c4236dd09d0088d9",
        "hash_cont_tokens": "cb01422bf828aefe"
      },
      "truncated": 0,
      "non_truncated": 206,
      "padded": 817,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    },
    "lighteval|agieval:sat-math|0": {
      "hashes": {
        "hash_examples": "01db7291603fc1a0",
        "hash_full_prompts": "8dab33ab73deb698",
        "hash_input_tokens": "e4d4e2e413144ff5",
        "hash_cont_tokens": "0c4980b69a75cb83"
      },
      "truncated": 0,
      "non_truncated": 220,
      "padded": 876,
      "non_padded": 4,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "da3af66181f18ddf",
      "hash_full_prompts": "37ef9da1700eed47",
      "hash_input_tokens": "3a23d7f630bcf75b",
      "hash_cont_tokens": "7f70e41f6e93ac40"
    },
    "truncated": 0,
    "non_truncated": 5151,
    "padded": 21772,
    "non_padded": 92,
    "num_truncated_few_shots": 0
  }
}