open-r1-eval-leaderboard
/
eval_results
/AI-MO
/deepseek-math-7b-sft
/aimo_v01.41
/gsm8k
/results_2024-04-25T22-27-34.001206.json

edbeeching
HF staff
Upload eval_results/AI-MO/deepseek-math-7b-sft/aimo_v01.41/gsm8k/results_2024-04-25T22-27-34.001206.json with huggingface_hub
6bdab71
verified
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 4,
    "max_samples": null,
    "job_id": "",
    "start_time": 1836474.402691832,
    "end_time": 1836865.935514845,
    "total_evaluation_time_secondes": "391.5328230129089",
    "model_name": "AI-MO/deepseek-math-7b-sft",
    "model_sha": "6d9b2ab05af9908eac6d5eeb8e37ae2c8ea22dba",
    "model_dtype": "torch.bfloat16",
    "model_size": "12.93 GB",
    "config": null
  },
  "results": {
    "lighteval|gsm8k|5": {
      "qem": 0.533737680060652,
      "qem_stderr": 0.01374109641222676
    },
    "all": {
      "qem": 0.533737680060652,
      "qem_stderr": 0.01374109641222676
    }
  },
  "versions": {
    "lighteval|gsm8k|5": 0
  },
  "config_tasks": {
    "lighteval|gsm8k": {
      "name": "gsm8k",
      "prompt_function": "gsm8k",
      "hf_repo": "gsm8k",
      "hf_subset": "main",
      "metric": [
        "quasi_exact_match_gsm8k"
      ],
      "hf_avail_splits": [
        "train",
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling_from_train",
      "generation_size": 256,
      "stop_sequence": [
        "Question:"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "lighteval"
      ],
      "original_num_docs": 1319,
      "effective_num_docs": 1319,
      "trust_dataset": true,
      "must_remove_duplicate_docs": null
    }
  },
  "summary_tasks": {
    "lighteval|gsm8k|5": {
      "hashes": {
        "hash_examples": "0ed016e24e7512fd",
        "hash_full_prompts": "b6d97958f0ed1df8",
        "hash_input_tokens": "3f58ee5e568409ec",
        "hash_cont_tokens": "2b7d292f91d596af"
      },
      "truncated": 1319,
      "non_truncated": 0,
      "padded": 611,
      "non_padded": 708,
      "effective_few_shots": 5.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "bc71463e88551d0e",
      "hash_full_prompts": "b5a5dacaed0816ff",
      "hash_input_tokens": "e4e0fd8eba82bd47",
      "hash_cont_tokens": "ecec1047a09d9b69"
    },
    "truncated": 1319,
    "non_truncated": 0,
    "padded": 611,
    "non_padded": 708,
    "num_truncated_few_shots": 0
  }
}