Upload eval_results/meta-llama/Meta-Llama-3-70B-Instruct/main/agieval/results_2024-04-21T20-29-40.818491.json with huggingface_hub
{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 4163358.325228971,
"end_time": 4166270.189569278,
"total_evaluation_time_secondes": "2911.864340307191",
"model_name": "meta-llama/Meta-Llama-3-70B-Instruct",
"model_sha": "5fcb2901844dde3111159f24205b71c25900ffbd",
"model_dtype": "torch.bfloat16",
"model_size": "131.73 GB",
"config": null
},
"results": {
"lighteval|agieval:aqua-rat|0": {
"acc": 0.40551181102362205,
"acc_stderr": 0.030868328175712653,
"acc_norm": 0.3937007874015748,
"acc_norm_stderr": 0.030716121952972117
},
"lighteval|agieval:gaokao-biology|0": {
"acc": 0.6714285714285714,
"acc_stderr": 0.032489397968768416,
"acc_norm": 0.6142857142857143,
"acc_norm_stderr": 0.03367014035794127
},
"lighteval|agieval:gaokao-chemistry|0": {
"acc": 0.3526570048309179,
"acc_stderr": 0.0332896994021347,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.032844309272143116
},
"lighteval|agieval:gaokao-chinese|0": {
"acc": 0.5487804878048781,
"acc_stderr": 0.031791441795983734,
"acc_norm": 0.532520325203252,
"acc_norm_stderr": 0.03187619087482798
},
"lighteval|agieval:gaokao-english|0": {
"acc": 0.803921568627451,
"acc_stderr": 0.02273378940544759,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.026787453111906508
},
"lighteval|agieval:gaokao-geography|0": {
"acc": 0.7135678391959799,
"acc_stderr": 0.03212890530137069,
"acc_norm": 0.6884422110552764,
"acc_norm_stderr": 0.03291322637124229
},
"lighteval|agieval:gaokao-history|0": {
"acc": 0.7021276595744681,
"acc_stderr": 0.02989614568209546,
"acc_norm": 0.574468085106383,
"acc_norm_stderr": 0.03232146916224469
},
"lighteval|agieval:gaokao-mathqa|0": {
"acc": 0.3732193732193732,
"acc_stderr": 0.02585269660834589,
"acc_norm": 0.3247863247863248,
"acc_norm_stderr": 0.025031418430108834
},
"lighteval|agieval:gaokao-physics|0": {
"acc": 0.485,
"acc_stderr": 0.03542810683297719,
"acc_norm": 0.46,
"acc_norm_stderr": 0.03533045720097817
},
"lighteval|agieval:logiqa-en|0": {
"acc": 0.5514592933947773,
"acc_stderr": 0.019507471591213885,
"acc_norm": 0.40860215053763443,
"acc_norm_stderr": 0.019281175878347597
},
"lighteval|agieval:logiqa-zh|0": {
"acc": 0.4869431643625192,
"acc_stderr": 0.019604925560180238,
"acc_norm": 0.4485407066052227,
"acc_norm_stderr": 0.019507471591213885
},
"lighteval|agieval:lsat-ar|0": {
"acc": 0.3,
"acc_stderr": 0.030282512572202356,
"acc_norm": 0.2391304347826087,
"acc_norm_stderr": 0.02818738529393393
},
"lighteval|agieval:lsat-lr|0": {
"acc": 0.7764705882352941,
"acc_stderr": 0.018465920069400524,
"acc_norm": 0.4823529411764706,
"acc_norm_stderr": 0.02214830266704872
},
"lighteval|agieval:lsat-rc|0": {
"acc": 0.8661710037174721,
"acc_stderr": 0.020797423422911385,
"acc_norm": 0.5613382899628253,
"acc_norm_stderr": 0.030311665540718364
},
"lighteval|agieval:sat-en|0": {
"acc": 0.8883495145631068,
"acc_stderr": 0.021996081085391462,
"acc_norm": 0.6796116504854369,
"acc_norm_stderr": 0.032590560881716434
},
"lighteval|agieval:sat-en-without-passage|0": {
"acc": 0.5533980582524272,
"acc_stderr": 0.034721796582639484,
"acc_norm": 0.4368932038834951,
"acc_norm_stderr": 0.03464225055241278
},
"lighteval|agieval:sat-math|0": {
"acc": 0.6590909090909091,
"acc_stderr": 0.03203095553573995,
"acc_norm": 0.4590909090909091,
"acc_norm_stderr": 0.033673590744258824
},
"lighteval|agieval:_average|0": {
"acc": 0.5963586380777509,
"acc_stderr": 0.027757976328971506,
"acc_norm": 0.4890333915253973,
"acc_norm_stderr": 0.029519599404942087
},
"all": {
"acc": 0.5963586380777509,
"acc_stderr": 0.027757976328971506,
"acc_norm": 0.4890333915253973,
"acc_norm_stderr": 0.029519599404942087
}
},
"versions": {
"lighteval|agieval:aqua-rat|0": 0,
"lighteval|agieval:gaokao-biology|0": 0,
"lighteval|agieval:gaokao-chemistry|0": 0,
"lighteval|agieval:gaokao-chinese|0": 0,
"lighteval|agieval:gaokao-english|0": 0,
"lighteval|agieval:gaokao-geography|0": 0,
"lighteval|agieval:gaokao-history|0": 0,
"lighteval|agieval:gaokao-mathqa|0": 0,
"lighteval|agieval:gaokao-physics|0": 0,
"lighteval|agieval:logiqa-en|0": 0,
"lighteval|agieval:logiqa-zh|0": 0,
"lighteval|agieval:lsat-ar|0": 0,
"lighteval|agieval:lsat-lr|0": 0,
"lighteval|agieval:lsat-rc|0": 0,
"lighteval|agieval:sat-en|0": 0,
"lighteval|agieval:sat-en-without-passage|0": 0,
"lighteval|agieval:sat-math|0": 0
},
"config_tasks": {
"lighteval|agieval:aqua-rat": {
"name": "agieval:aqua-rat",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-aqua-rat",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 254,
"effective_num_docs": 254,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-biology": {
"name": "agieval:gaokao-biology",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-biology",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chemistry": {
"name": "agieval:gaokao-chemistry",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chemistry",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 207,
"effective_num_docs": 207,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chinese": {
"name": "agieval:gaokao-chinese",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chinese",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 246,
"effective_num_docs": 246,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-english": {
"name": "agieval:gaokao-english",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-english",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-geography": {
"name": "agieval:gaokao-geography",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-geography",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 199,
"effective_num_docs": 199,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-history": {
"name": "agieval:gaokao-history",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-history",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-mathqa": {
"name": "agieval:gaokao-mathqa",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-mathqa",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 351,
"effective_num_docs": 351,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-physics": {
"name": "agieval:gaokao-physics",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-physics",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 200,
"effective_num_docs": 200,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-en": {
"name": "agieval:logiqa-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-zh": {
"name": "agieval:logiqa-zh",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-zh",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-ar": {
"name": "agieval:lsat-ar",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-ar",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 230,
"effective_num_docs": 230,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-lr": {
"name": "agieval:lsat-lr",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-lr",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 510,
"effective_num_docs": 510,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-rc": {
"name": "agieval:lsat-rc",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-rc",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 269,
"effective_num_docs": 269,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en": {
"name": "agieval:sat-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en-without-passage": {
"name": "agieval:sat-en-without-passage",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en-without-passage",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-math": {
"name": "agieval:sat-math",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-math",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 220,
"effective_num_docs": 220,
"trust_dataset": true,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"lighteval|agieval:aqua-rat|0": {
"hashes": {
"hash_examples": "f09607f69e5b7525",
"hash_full_prompts": "286855998c6e5e88",
"hash_input_tokens": "c8a11a37fad94991",
"hash_cont_tokens": "ef0b3383d6f03cf7"
},
"truncated": 0,
"non_truncated": 254,
"padded": 1270,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-biology|0": {
"hashes": {
"hash_examples": "f262eaf4a72db963",
"hash_full_prompts": "079710887794e92b",
"hash_input_tokens": "760164ddee928404",
"hash_cont_tokens": "6dbb7db97194d8c0"
},
"truncated": 0,
"non_truncated": 210,
"padded": 840,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chemistry|0": {
"hashes": {
"hash_examples": "47f2e649f58d9da5",
"hash_full_prompts": "34855f051b6dd679",
"hash_input_tokens": "eb3e767b26311b8d",
"hash_cont_tokens": "b01f9f3c4d008784"
},
"truncated": 0,
"non_truncated": 207,
"padded": 825,
"non_padded": 6,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chinese|0": {
"hashes": {
"hash_examples": "1010b21fde4726ab",
"hash_full_prompts": "49905c6e90bb4ffb",
"hash_input_tokens": "ebfb3b7cbf519cb9",
"hash_cont_tokens": "c14e536a2664f693"
},
"truncated": 0,
"non_truncated": 246,
"padded": 981,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-english|0": {
"hashes": {
"hash_examples": "4864e492a350ae93",
"hash_full_prompts": "0d368ac714d82cbd",
"hash_input_tokens": "97b152ce2f9dd276",
"hash_cont_tokens": "b0295fd391ea9065"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1224,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-geography|0": {
"hashes": {
"hash_examples": "ec3a021e37650e7d",
"hash_full_prompts": "ae39d50701103ed2",
"hash_input_tokens": "03fe8ffbd234ac1f",
"hash_cont_tokens": "5696e6aef46a4012"
},
"truncated": 0,
"non_truncated": 199,
"padded": 794,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-history|0": {
"hashes": {
"hash_examples": "b3fad1596f1ae1f9",
"hash_full_prompts": "d846ac9f414ea77f",
"hash_input_tokens": "792a5a317dd88386",
"hash_cont_tokens": "e2947e88a3f08a49"
},
"truncated": 0,
"non_truncated": 235,
"padded": 938,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-mathqa|0": {
"hashes": {
"hash_examples": "1d1088556861b0b0",
"hash_full_prompts": "2d6d02d7c7878410",
"hash_input_tokens": "23b49a4c37eb5643",
"hash_cont_tokens": "7bce9c6eac88e31d"
},
"truncated": 0,
"non_truncated": 351,
"padded": 1403,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-physics|0": {
"hashes": {
"hash_examples": "eb05f035c7bfca2f",
"hash_full_prompts": "c32439b20ae8ddc3",
"hash_input_tokens": "83afe03525a018b6",
"hash_cont_tokens": "77f9a76c75c408ee"
},
"truncated": 0,
"non_truncated": 200,
"padded": 792,
"non_padded": 8,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-en|0": {
"hashes": {
"hash_examples": "0a688a45f69c21e0",
"hash_full_prompts": "b758941be76e6c93",
"hash_input_tokens": "3b4e102d5a77c2ee",
"hash_cont_tokens": "bdb78fbd84121ffa"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2591,
"non_padded": 13,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-zh|0": {
"hashes": {
"hash_examples": "620d6888b6012ea5",
"hash_full_prompts": "00581fe3b81ab4bf",
"hash_input_tokens": "47c668eacbda6e4c",
"hash_cont_tokens": "a470f1c94ce464df"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2577,
"non_padded": 27,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-ar|0": {
"hashes": {
"hash_examples": "627c8f5ccd5da209",
"hash_full_prompts": "c35e6779d96ec186",
"hash_input_tokens": "21aca9a9e3907790",
"hash_cont_tokens": "5a2d4bf9575205d0"
},
"truncated": 0,
"non_truncated": 230,
"padded": 1134,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-lr|0": {
"hashes": {
"hash_examples": "794641c86de172f5",
"hash_full_prompts": "28132acff0b72fac",
"hash_input_tokens": "21b8cf84591f6416",
"hash_cont_tokens": "b7efbd859fa461cb"
},
"truncated": 0,
"non_truncated": 510,
"padded": 2528,
"non_padded": 22,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-rc|0": {
"hashes": {
"hash_examples": "35981ed917ea01cf",
"hash_full_prompts": "38671d826aec9b04",
"hash_input_tokens": "e6836cf65003774d",
"hash_cont_tokens": "25cd21c6f88e868d"
},
"truncated": 0,
"non_truncated": 269,
"padded": 1345,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en|0": {
"hashes": {
"hash_examples": "041c39c646536a1e",
"hash_full_prompts": "7cfcf777a80aab88",
"hash_input_tokens": "338b5d3aa5e5d50c",
"hash_cont_tokens": "a32456da5b0cb4d6"
},
"truncated": 0,
"non_truncated": 206,
"padded": 821,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en-without-passage|0": {
"hashes": {
"hash_examples": "e4d9284367dff68f",
"hash_full_prompts": "858c585678a575e3",
"hash_input_tokens": "0fa7ff75d35b4ef7",
"hash_cont_tokens": "b5be143c7f4c4669"
},
"truncated": 0,
"non_truncated": 206,
"padded": 814,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-math|0": {
"hashes": {
"hash_examples": "01db7291603fc1a0",
"hash_full_prompts": "22304b8d6215acee",
"hash_input_tokens": "b41cfbbe32e7050d",
"hash_cont_tokens": "05e08502d89d7a73"
},
"truncated": 0,
"non_truncated": 220,
"padded": 873,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "da3af66181f18ddf",
"hash_full_prompts": "38bd3cda94f79eaf",
"hash_input_tokens": "4f8569cc545f17c5",
"hash_cont_tokens": "0e86a0b3e19b1c9c"
},
"truncated": 0,
"non_truncated": 5151,
"padded": 21750,
"non_padded": 114,
"num_truncated_few_shots": 0
}
}
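
For reference, a minimal sketch of how one might read this results file and list the per-task scores. This assumes the JSON above has been downloaded locally under the filename from the commit message; the variable names are illustrative, not part of lighteval's API.

```python
import json

# Hypothetical local path; adjust to wherever the file was downloaded.
path = "results_2024-04-21T20-29-40.818491.json"

with open(path, encoding="utf-8") as f:
    data = json.load(f)

# Basic run metadata lives under "config_general".
cfg = data["config_general"]
print(f"Model: {cfg['model_name']} ({cfg['model_dtype']}, {cfg['model_size']})")

# Per-task scores live under "results"; each entry carries accuracy and
# normalized accuracy with their standard errors. "all" duplicates the average.
for task, scores in sorted(data["results"].items()):
    if task == "all":
        continue
    print(
        f"{task:50s} "
        f"acc={scores['acc']:.4f} ±{scores['acc_stderr']:.4f}  "
        f"acc_norm={scores['acc_norm']:.4f} ±{scores['acc_norm_stderr']:.4f}"
    )
```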