{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 679014.538111186,
"end_time": 679390.124622882,
"total_evaluation_time_secondes": "375.58651169599034",
"model_name": "meta-llama/Llama-2-7b-chat-hf",
"model_sha": "92011f62d7604e261f748ec0cfe6329f31193e33",
"model_dtype": "torch.bfloat16",
"model_size": "12.61 GB",
"config": null
},
"results": {
"lighteval|agieval:aqua-rat|0": {
"acc": 0.24803149606299213,
"acc_stderr": 0.027151447091181707,
"acc_norm": 0.24803149606299213,
"acc_norm_stderr": 0.027151447091181707
},
"lighteval|agieval:gaokao-biology|0": {
"acc": 0.2,
"acc_stderr": 0.02766857855464295,
"acc_norm": 0.28095238095238095,
"acc_norm_stderr": 0.031090094469344617
},
"lighteval|agieval:gaokao-chemistry|0": {
"acc": 0.2608695652173913,
"acc_stderr": 0.030594167471991684,
"acc_norm": 0.2318840579710145,
"acc_norm_stderr": 0.029404596565406532
},
"lighteval|agieval:gaokao-chinese|0": {
"acc": 0.1910569105691057,
"acc_stderr": 0.02511639577458705,
"acc_norm": 0.18292682926829268,
"acc_norm_stderr": 0.024699384806198304
},
"lighteval|agieval:gaokao-english|0": {
"acc": 0.4444444444444444,
"acc_stderr": 0.028452639985088006,
"acc_norm": 0.39869281045751637,
"acc_norm_stderr": 0.028036092273891762
},
"lighteval|agieval:gaokao-geography|0": {
"acc": 0.23618090452261306,
"acc_stderr": 0.030184574030479208,
"acc_norm": 0.24120603015075376,
"acc_norm_stderr": 0.030403488732701926
},
"lighteval|agieval:gaokao-history|0": {
"acc": 0.251063829787234,
"acc_stderr": 0.028346963777162452,
"acc_norm": 0.251063829787234,
"acc_norm_stderr": 0.02834696377716245
},
"lighteval|agieval:gaokao-mathqa|0": {
"acc": 0.245014245014245,
"acc_stderr": 0.02298957930108734,
"acc_norm": 0.28774928774928776,
"acc_norm_stderr": 0.024198561654366728
},
"lighteval|agieval:gaokao-physics|0": {
"acc": 0.255,
"acc_stderr": 0.030897382432918608,
"acc_norm": 0.275,
"acc_norm_stderr": 0.031652557907861915
},
"lighteval|agieval:logiqa-en|0": {
"acc": 0.27035330261136714,
"acc_stderr": 0.017420694783393142,
"acc_norm": 0.31797235023041476,
"acc_norm_stderr": 0.01826581231613446
},
"lighteval|agieval:logiqa-zh|0": {
"acc": 0.25960061443932414,
"acc_stderr": 0.017196070008180027,
"acc_norm": 0.3195084485407066,
"acc_norm_stderr": 0.018289248621981204
},
"lighteval|agieval:lsat-ar|0": {
"acc": 0.23043478260869565,
"acc_stderr": 0.027827807522276156,
"acc_norm": 0.20434782608695654,
"acc_norm_stderr": 0.026645808150011344
},
"lighteval|agieval:lsat-lr|0": {
"acc": 0.3215686274509804,
"acc_stderr": 0.020702886736741092,
"acc_norm": 0.2627450980392157,
"acc_norm_stderr": 0.019508202470897688
},
"lighteval|agieval:lsat-rc|0": {
"acc": 0.35687732342007433,
"acc_stderr": 0.029264357329058684,
"acc_norm": 0.24907063197026022,
"acc_norm_stderr": 0.026417602980579716
},
"lighteval|agieval:sat-en|0": {
"acc": 0.48058252427184467,
"acc_stderr": 0.034895171350660135,
"acc_norm": 0.3300970873786408,
"acc_norm_stderr": 0.03284353151466849
},
"lighteval|agieval:sat-en-without-passage|0": {
"acc": 0.3446601941747573,
"acc_stderr": 0.033193412858590815,
"acc_norm": 0.27184466019417475,
"acc_norm_stderr": 0.03107388056324749
},
"lighteval|agieval:sat-math|0": {
"acc": 0.22727272727272727,
"acc_stderr": 0.028318140007311816,
"acc_norm": 0.22272727272727272,
"acc_norm_stderr": 0.02811585901870265
},
"lighteval|agieval:_average|0": {
"acc": 0.28370655834516456,
"acc_stderr": 0.027071780530314754,
"acc_norm": 0.26916588809218317,
"acc_norm_stderr": 0.026831948994961122
},
"all": {
"acc": 0.28370655834516456,
"acc_stderr": 0.027071780530314754,
"acc_norm": 0.26916588809218317,
"acc_norm_stderr": 0.026831948994961122
}
},
"versions": {
"lighteval|agieval:aqua-rat|0": 0,
"lighteval|agieval:gaokao-biology|0": 0,
"lighteval|agieval:gaokao-chemistry|0": 0,
"lighteval|agieval:gaokao-chinese|0": 0,
"lighteval|agieval:gaokao-english|0": 0,
"lighteval|agieval:gaokao-geography|0": 0,
"lighteval|agieval:gaokao-history|0": 0,
"lighteval|agieval:gaokao-mathqa|0": 0,
"lighteval|agieval:gaokao-physics|0": 0,
"lighteval|agieval:logiqa-en|0": 0,
"lighteval|agieval:logiqa-zh|0": 0,
"lighteval|agieval:lsat-ar|0": 0,
"lighteval|agieval:lsat-lr|0": 0,
"lighteval|agieval:lsat-rc|0": 0,
"lighteval|agieval:sat-en|0": 0,
"lighteval|agieval:sat-en-without-passage|0": 0,
"lighteval|agieval:sat-math|0": 0
},
"config_tasks": {
"lighteval|agieval:aqua-rat": {
"name": "agieval:aqua-rat",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-aqua-rat",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 254,
"effective_num_docs": 254,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-biology": {
"name": "agieval:gaokao-biology",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-biology",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chemistry": {
"name": "agieval:gaokao-chemistry",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chemistry",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 207,
"effective_num_docs": 207,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chinese": {
"name": "agieval:gaokao-chinese",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chinese",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 246,
"effective_num_docs": 246,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-english": {
"name": "agieval:gaokao-english",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-english",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-geography": {
"name": "agieval:gaokao-geography",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-geography",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 199,
"effective_num_docs": 199,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-history": {
"name": "agieval:gaokao-history",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-history",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-mathqa": {
"name": "agieval:gaokao-mathqa",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-mathqa",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 351,
"effective_num_docs": 351,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-physics": {
"name": "agieval:gaokao-physics",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-physics",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 200,
"effective_num_docs": 200,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-en": {
"name": "agieval:logiqa-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-zh": {
"name": "agieval:logiqa-zh",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-zh",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-ar": {
"name": "agieval:lsat-ar",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-ar",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 230,
"effective_num_docs": 230,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-lr": {
"name": "agieval:lsat-lr",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-lr",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 510,
"effective_num_docs": 510,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-rc": {
"name": "agieval:lsat-rc",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-rc",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 269,
"effective_num_docs": 269,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en": {
"name": "agieval:sat-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en-without-passage": {
"name": "agieval:sat-en-without-passage",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en-without-passage",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-math": {
"name": "agieval:sat-math",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-math",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 220,
"effective_num_docs": 220,
"trust_dataset": true,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"lighteval|agieval:aqua-rat|0": {
"hashes": {
"hash_examples": "f09607f69e5b7525",
"hash_full_prompts": "3861b27efd5a6170",
"hash_input_tokens": "b81ddf32d9e36b1f",
"hash_cont_tokens": "d68d99aa7aafcf10"
},
"truncated": 0,
"non_truncated": 254,
"padded": 1270,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-biology|0": {
"hashes": {
"hash_examples": "f262eaf4a72db963",
"hash_full_prompts": "1ef14dcf5933ff33",
"hash_input_tokens": "0e02a46ea2b2e076",
"hash_cont_tokens": "fb818e5d1e6cfaf9"
},
"truncated": 0,
"non_truncated": 210,
"padded": 840,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chemistry|0": {
"hashes": {
"hash_examples": "47f2e649f58d9da5",
"hash_full_prompts": "324a4d07d3b7de40",
"hash_input_tokens": "ab927b5975095914",
"hash_cont_tokens": "cae820a904612b14"
},
"truncated": 0,
"non_truncated": 207,
"padded": 831,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chinese|0": {
"hashes": {
"hash_examples": "1010b21fde4726ab",
"hash_full_prompts": "938611fd6ecdbf4e",
"hash_input_tokens": "72d5df618421255b",
"hash_cont_tokens": "4f99bcd4d0795887"
},
"truncated": 0,
"non_truncated": 246,
"padded": 979,
"non_padded": 5,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-english|0": {
"hashes": {
"hash_examples": "4864e492a350ae93",
"hash_full_prompts": "2cff929f74eb0813",
"hash_input_tokens": "364e98db90c628be",
"hash_cont_tokens": "8b0fdf9d99f09625"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1217,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-geography|0": {
"hashes": {
"hash_examples": "ec3a021e37650e7d",
"hash_full_prompts": "79c20e1e52638229",
"hash_input_tokens": "fb6db8d803e25b33",
"hash_cont_tokens": "0144954f291c77ff"
},
"truncated": 0,
"non_truncated": 199,
"padded": 796,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-history|0": {
"hashes": {
"hash_examples": "b3fad1596f1ae1f9",
"hash_full_prompts": "541c270a0bbab3fd",
"hash_input_tokens": "4461fd574f259120",
"hash_cont_tokens": "f7f0c87190e11336"
},
"truncated": 0,
"non_truncated": 235,
"padded": 940,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-mathqa|0": {
"hashes": {
"hash_examples": "1d1088556861b0b0",
"hash_full_prompts": "6002200193304ad6",
"hash_input_tokens": "e6fb6d8930aa10fa",
"hash_cont_tokens": "1c92bd434c8ca7cf"
},
"truncated": 0,
"non_truncated": 351,
"padded": 1404,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-physics|0": {
"hashes": {
"hash_examples": "eb05f035c7bfca2f",
"hash_full_prompts": "7b606fe0e48d1136",
"hash_input_tokens": "1c39aecbf26045ac",
"hash_cont_tokens": "2c78afa11a936dc4"
},
"truncated": 0,
"non_truncated": 200,
"padded": 800,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-en|0": {
"hashes": {
"hash_examples": "0a688a45f69c21e0",
"hash_full_prompts": "70df665a01ea64f7",
"hash_input_tokens": "61f22bb8a98d5e05",
"hash_cont_tokens": "1c7594dbf0a0a533"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2588,
"non_padded": 16,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-zh|0": {
"hashes": {
"hash_examples": "620d6888b6012ea5",
"hash_full_prompts": "79066930a44b0f7e",
"hash_input_tokens": "2fe19dd623007024",
"hash_cont_tokens": "16ad57705a79dec4"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2585,
"non_padded": 19,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-ar|0": {
"hashes": {
"hash_examples": "627c8f5ccd5da209",
"hash_full_prompts": "254286f37d4a76ce",
"hash_input_tokens": "dcb768476dabfd30",
"hash_cont_tokens": "32e72f9ac5e990db"
},
"truncated": 0,
"non_truncated": 230,
"padded": 1139,
"non_padded": 11,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-lr|0": {
"hashes": {
"hash_examples": "794641c86de172f5",
"hash_full_prompts": "cf1badbb8c42f6a8",
"hash_input_tokens": "f46eacac0dc8f43d",
"hash_cont_tokens": "f59d4751382d6cd2"
},
"truncated": 0,
"non_truncated": 510,
"padded": 2527,
"non_padded": 23,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-rc|0": {
"hashes": {
"hash_examples": "35981ed917ea01cf",
"hash_full_prompts": "5fbc2eae9474469b",
"hash_input_tokens": "29375d042cf27c7e",
"hash_cont_tokens": "3e5cd22ff2929a47"
},
"truncated": 0,
"non_truncated": 269,
"padded": 1345,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en|0": {
"hashes": {
"hash_examples": "041c39c646536a1e",
"hash_full_prompts": "171c266251542e36",
"hash_input_tokens": "1ad83cc18fd43db0",
"hash_cont_tokens": "3bc244ff75c1a591"
},
"truncated": 0,
"non_truncated": 206,
"padded": 821,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en-without-passage|0": {
"hashes": {
"hash_examples": "e4d9284367dff68f",
"hash_full_prompts": "66bc76f2704c6ca7",
"hash_input_tokens": "afd505d4ceeeaa45",
"hash_cont_tokens": "3bc244ff75c1a591"
},
"truncated": 0,
"non_truncated": 206,
"padded": 821,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-math|0": {
"hashes": {
"hash_examples": "01db7291603fc1a0",
"hash_full_prompts": "6016bbecd8642dbe",
"hash_input_tokens": "86192e0452c1707e",
"hash_cont_tokens": "5175b61accc0c5d1"
},
"truncated": 0,
"non_truncated": 220,
"padded": 879,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "da3af66181f18ddf",
"hash_full_prompts": "3554a21b6b69cbf8",
"hash_input_tokens": "4abd38d6a674e0cd",
"hash_cont_tokens": "0fc139d3b2070461"
},
"truncated": 0,
"non_truncated": 5151,
"padded": 21782,
"non_padded": 82,
"num_truncated_few_shots": 0
}
}