{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 411888.453877038,
"end_time": 415824.765071092,
"total_evaluation_time_secondes": "3936.3111940540257",
"model_name": "edbeeching/mixtral-8x7b-instruct-v0.1_merged",
"model_sha": "6da964ddd4ac2f1edfa95d2a72082d87f1007cbf",
"model_dtype": "torch.bfloat16",
"model_size": "87.49 GB",
"config": null
},
"results": {
"lighteval|agieval:aqua-rat|0": {
"acc": 0.21653543307086615,
"acc_stderr": 0.025894880176407675,
"acc_norm": 0.20866141732283464,
"acc_norm_stderr": 0.02554712225493389
},
"lighteval|agieval:gaokao-biology|0": {
"acc": 0.18571428571428572,
"acc_stderr": 0.026899110619750637,
"acc_norm": 0.2571428571428571,
"acc_norm_stderr": 0.030231990420749873
},
"lighteval|agieval:gaokao-chemistry|0": {
"acc": 0.24154589371980675,
"acc_stderr": 0.029821595334353808,
"acc_norm": 0.2753623188405797,
"acc_norm_stderr": 0.03112283151905818
},
"lighteval|agieval:gaokao-chinese|0": {
"acc": 0.1991869918699187,
"acc_stderr": 0.025516024108089827,
"acc_norm": 0.1951219512195122,
"acc_norm_stderr": 0.025318330487226405
},
"lighteval|agieval:gaokao-english|0": {
"acc": 0.4150326797385621,
"acc_stderr": 0.0282135041778241,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.02782610930728369
},
"lighteval|agieval:gaokao-geography|0": {
"acc": 0.22110552763819097,
"acc_stderr": 0.02949215554716756,
"acc_norm": 0.24623115577889448,
"acc_norm_stderr": 0.030616673158037296
},
"lighteval|agieval:gaokao-history|0": {
"acc": 0.2425531914893617,
"acc_stderr": 0.028020226271200214,
"acc_norm": 0.225531914893617,
"acc_norm_stderr": 0.02732107841738753
},
"lighteval|agieval:gaokao-mathqa|0": {
"acc": 0.22507122507122507,
"acc_stderr": 0.022323221011581094,
"acc_norm": 0.21652421652421652,
"acc_norm_stderr": 0.022015674947168887
},
"lighteval|agieval:gaokao-physics|0": {
"acc": 0.245,
"acc_stderr": 0.030488073292114216,
"acc_norm": 0.255,
"acc_norm_stderr": 0.030897382432918608
},
"lighteval|agieval:logiqa-en|0": {
"acc": 0.22427035330261136,
"acc_stderr": 0.01636004334826551,
"acc_norm": 0.27342549923195086,
"acc_norm_stderr": 0.01748247454768128
},
"lighteval|agieval:logiqa-zh|0": {
"acc": 0.2196620583717358,
"acc_stderr": 0.01623910941493393,
"acc_norm": 0.28110599078341014,
"acc_norm_stderr": 0.01763237462646001
},
"lighteval|agieval:lsat-ar|0": {
"acc": 0.16956521739130434,
"acc_stderr": 0.024797243687717664,
"acc_norm": 0.1391304347826087,
"acc_norm_stderr": 0.02286978106640669
},
"lighteval|agieval:lsat-lr|0": {
"acc": 0.23921568627450981,
"acc_stderr": 0.018908919409402702,
"acc_norm": 0.27647058823529413,
"acc_norm_stderr": 0.019824108780753
},
"lighteval|agieval:lsat-rc|0": {
"acc": 0.17472118959107807,
"acc_stderr": 0.023195606852050955,
"acc_norm": 0.17472118959107807,
"acc_norm_stderr": 0.02319560685205097
},
"lighteval|agieval:sat-en|0": {
"acc": 0.36893203883495146,
"acc_stderr": 0.03370034302177868,
"acc_norm": 0.2766990291262136,
"acc_norm_stderr": 0.03124542318927996
},
"lighteval|agieval:sat-en-without-passage|0": {
"acc": 0.3058252427184466,
"acc_stderr": 0.032180600400244896,
"acc_norm": 0.2815533980582524,
"acc_norm_stderr": 0.03141236994965782
},
"lighteval|agieval:sat-math|0": {
"acc": 0.2772727272727273,
"acc_stderr": 0.030249537675886704,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.029435485225874185
},
"lighteval|agieval:_average|0": {
"acc": 0.24536527894526952,
"acc_stderr": 0.02601765849110413,
"acc_norm": 0.2482106092501909,
"acc_norm_stderr": 0.026117342187231076
},
"all": {
"acc": 0.24536527894526952,
"acc_stderr": 0.02601765849110413,
"acc_norm": 0.2482106092501909,
"acc_norm_stderr": 0.026117342187231076
}
},
"versions": {
"lighteval|agieval:aqua-rat|0": 0,
"lighteval|agieval:gaokao-biology|0": 0,
"lighteval|agieval:gaokao-chemistry|0": 0,
"lighteval|agieval:gaokao-chinese|0": 0,
"lighteval|agieval:gaokao-english|0": 0,
"lighteval|agieval:gaokao-geography|0": 0,
"lighteval|agieval:gaokao-history|0": 0,
"lighteval|agieval:gaokao-mathqa|0": 0,
"lighteval|agieval:gaokao-physics|0": 0,
"lighteval|agieval:logiqa-en|0": 0,
"lighteval|agieval:logiqa-zh|0": 0,
"lighteval|agieval:lsat-ar|0": 0,
"lighteval|agieval:lsat-lr|0": 0,
"lighteval|agieval:lsat-rc|0": 0,
"lighteval|agieval:sat-en|0": 0,
"lighteval|agieval:sat-en-without-passage|0": 0,
"lighteval|agieval:sat-math|0": 0
},
"config_tasks": {
"lighteval|agieval:aqua-rat": {
"name": "agieval:aqua-rat",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-aqua-rat",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 254,
"effective_num_docs": 254,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-biology": {
"name": "agieval:gaokao-biology",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-biology",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 210,
"effective_num_docs": 210,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chemistry": {
"name": "agieval:gaokao-chemistry",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chemistry",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 207,
"effective_num_docs": 207,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-chinese": {
"name": "agieval:gaokao-chinese",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-chinese",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 246,
"effective_num_docs": 246,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-english": {
"name": "agieval:gaokao-english",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-english",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 306,
"effective_num_docs": 306,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-geography": {
"name": "agieval:gaokao-geography",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-geography",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 199,
"effective_num_docs": 199,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-history": {
"name": "agieval:gaokao-history",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-history",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 235,
"effective_num_docs": 235,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-mathqa": {
"name": "agieval:gaokao-mathqa",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-mathqa",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 351,
"effective_num_docs": 351,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:gaokao-physics": {
"name": "agieval:gaokao-physics",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-gaokao-physics",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 200,
"effective_num_docs": 200,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-en": {
"name": "agieval:logiqa-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:logiqa-zh": {
"name": "agieval:logiqa-zh",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-logiqa-zh",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 651,
"effective_num_docs": 651,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-ar": {
"name": "agieval:lsat-ar",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-ar",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 230,
"effective_num_docs": 230,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-lr": {
"name": "agieval:lsat-lr",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-lr",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 510,
"effective_num_docs": 510,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:lsat-rc": {
"name": "agieval:lsat-rc",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-lsat-rc",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 269,
"effective_num_docs": 269,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en": {
"name": "agieval:sat-en",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-en-without-passage": {
"name": "agieval:sat-en-without-passage",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-en-without-passage",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 206,
"effective_num_docs": 206,
"trust_dataset": true,
"must_remove_duplicate_docs": null
},
"lighteval|agieval:sat-math": {
"name": "agieval:sat-math",
"prompt_function": "agieval",
"hf_repo": "dmayhem93/agieval-sat-math",
"hf_subset": "default",
"metric": [
"loglikelihood_acc",
"loglikelihood_acc_norm_nospace"
],
"hf_avail_splits": [
"test"
],
"evaluation_splits": [
"test"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": 1,
"stop_sequence": null,
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 220,
"effective_num_docs": 220,
"trust_dataset": true,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"lighteval|agieval:aqua-rat|0": {
"hashes": {
"hash_examples": "f09607f69e5b7525",
"hash_full_prompts": "3861b27efd5a6170",
"hash_input_tokens": "24b3053366cab5d6",
"hash_cont_tokens": "7c2bcf33ed1e5ae3"
},
"truncated": 0,
"non_truncated": 254,
"padded": 1270,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-biology|0": {
"hashes": {
"hash_examples": "f262eaf4a72db963",
"hash_full_prompts": "1ef14dcf5933ff33",
"hash_input_tokens": "3553b0507d1c6868",
"hash_cont_tokens": "b3febf85776696e1"
},
"truncated": 0,
"non_truncated": 210,
"padded": 840,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chemistry|0": {
"hashes": {
"hash_examples": "47f2e649f58d9da5",
"hash_full_prompts": "324a4d07d3b7de40",
"hash_input_tokens": "62a14b0cfb722cf4",
"hash_cont_tokens": "711fb41221b6515e"
},
"truncated": 0,
"non_truncated": 207,
"padded": 830,
"non_padded": 1,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-chinese|0": {
"hashes": {
"hash_examples": "1010b21fde4726ab",
"hash_full_prompts": "938611fd6ecdbf4e",
"hash_input_tokens": "19eb2367f1cb72de",
"hash_cont_tokens": "393b1820b8f4534f"
},
"truncated": 0,
"non_truncated": 246,
"padded": 981,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-english|0": {
"hashes": {
"hash_examples": "4864e492a350ae93",
"hash_full_prompts": "17369402cc020876",
"hash_input_tokens": "066e65f806b474dd",
"hash_cont_tokens": "dd72ceb1d7224598"
},
"truncated": 0,
"non_truncated": 306,
"padded": 1222,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-geography|0": {
"hashes": {
"hash_examples": "ec3a021e37650e7d",
"hash_full_prompts": "79c20e1e52638229",
"hash_input_tokens": "2e676a0441f390c7",
"hash_cont_tokens": "e06462bcee629ea8"
},
"truncated": 0,
"non_truncated": 199,
"padded": 794,
"non_padded": 2,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-history|0": {
"hashes": {
"hash_examples": "b3fad1596f1ae1f9",
"hash_full_prompts": "541c270a0bbab3fd",
"hash_input_tokens": "cf7e8a42c7bdf5cd",
"hash_cont_tokens": "87cce58a4ec6cfd8"
},
"truncated": 0,
"non_truncated": 235,
"padded": 935,
"non_padded": 5,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-mathqa|0": {
"hashes": {
"hash_examples": "1d1088556861b0b0",
"hash_full_prompts": "6002200193304ad6",
"hash_input_tokens": "a2cfd284204fffcc",
"hash_cont_tokens": "771402d59229cbae"
},
"truncated": 0,
"non_truncated": 351,
"padded": 1401,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:gaokao-physics|0": {
"hashes": {
"hash_examples": "eb05f035c7bfca2f",
"hash_full_prompts": "7b606fe0e48d1136",
"hash_input_tokens": "798a188685f3c7ba",
"hash_cont_tokens": "f1574dddbe4231e1"
},
"truncated": 0,
"non_truncated": 200,
"padded": 797,
"non_padded": 3,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-en|0": {
"hashes": {
"hash_examples": "0a688a45f69c21e0",
"hash_full_prompts": "70df665a01ea64f7",
"hash_input_tokens": "4ef64d022f86d868",
"hash_cont_tokens": "106c1564fdd5ff8e"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2597,
"non_padded": 7,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:logiqa-zh|0": {
"hashes": {
"hash_examples": "620d6888b6012ea5",
"hash_full_prompts": "11b751123ca8b711",
"hash_input_tokens": "d30860e1a31f27b3",
"hash_cont_tokens": "22d3640f03c44bb0"
},
"truncated": 0,
"non_truncated": 651,
"padded": 2581,
"non_padded": 23,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-ar|0": {
"hashes": {
"hash_examples": "627c8f5ccd5da209",
"hash_full_prompts": "254286f37d4a76ce",
"hash_input_tokens": "dc24a391ad28e575",
"hash_cont_tokens": "9e13af9ad8f5f78e"
},
"truncated": 0,
"non_truncated": 230,
"padded": 1140,
"non_padded": 10,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-lr|0": {
"hashes": {
"hash_examples": "794641c86de172f5",
"hash_full_prompts": "cf1badbb8c42f6a8",
"hash_input_tokens": "2252f2846ec8cecf",
"hash_cont_tokens": "86333847359ceee5"
},
"truncated": 0,
"non_truncated": 510,
"padded": 2525,
"non_padded": 25,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:lsat-rc|0": {
"hashes": {
"hash_examples": "35981ed917ea01cf",
"hash_full_prompts": "5fbc2eae9474469b",
"hash_input_tokens": "768fcfff60a6a668",
"hash_cont_tokens": "cb47c6b984067525"
},
"truncated": 0,
"non_truncated": 269,
"padded": 1345,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en|0": {
"hashes": {
"hash_examples": "041c39c646536a1e",
"hash_full_prompts": "171c266251542e36",
"hash_input_tokens": "c5342c28aac7b5e9",
"hash_cont_tokens": "cb01422bf828aefe"
},
"truncated": 0,
"non_truncated": 206,
"padded": 821,
"non_padded": 0,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-en-without-passage|0": {
"hashes": {
"hash_examples": "e4d9284367dff68f",
"hash_full_prompts": "66bc76f2704c6ca7",
"hash_input_tokens": "582f986b74383001",
"hash_cont_tokens": "cb01422bf828aefe"
},
"truncated": 0,
"non_truncated": 206,
"padded": 817,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
},
"lighteval|agieval:sat-math|0": {
"hashes": {
"hash_examples": "01db7291603fc1a0",
"hash_full_prompts": "6016bbecd8642dbe",
"hash_input_tokens": "0f1ec1ab676fb3e4",
"hash_cont_tokens": "0c4980b69a75cb83"
},
"truncated": 0,
"non_truncated": 220,
"padded": 876,
"non_padded": 4,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "da3af66181f18ddf",
"hash_full_prompts": "e341944cee05878c",
"hash_input_tokens": "5f2600d57bfbb6aa",
"hash_cont_tokens": "7f70e41f6e93ac40"
},
"truncated": 0,
"non_truncated": 5151,
"padded": 21772,
"non_padded": 92,
"num_truncated_few_shots": 0
}
}