open-r1-eval-leaderboard/eval_results/HuggingFaceH4/qwen-1.5-1.8b-dpo/v0.4/winogrande/results_2024-03-22T22-20-29.917218.json

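The file can be pulled locally with huggingface_hub; a minimal sketch, assuming the results sit in the open-r1-eval-leaderboard/eval_results dataset repo (repo_id and repo_type below are assumptions read off the path above):

# Sketch: download this results file from the Hub (repo_id/repo_type assumed from the path above).
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(
    repo_id="open-r1-eval-leaderboard/eval_results",  # assumed dataset repo id
    repo_type="dataset",                              # assumed repo type
    filename="HuggingFaceH4/qwen-1.5-1.8b-dpo/v0.4/winogrande/results_2024-03-22T22-20-29.917218.json",
)
print(local_path)  # local cache path of the JSON file

The downloaded file contains: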
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 696813.153544088,
    "end_time": 696857.76366772,
    "total_evaluation_time_secondes": "44.61012363201007",
    "model_name": "HuggingFaceH4/qwen-1.5-1.8b-dpo",
    "model_sha": "4bdedb98bed9c2e5e2c5209fa40071c0fa459729",
    "model_dtype": "torch.bfloat16",
    "model_size": "3.79 GB",
    "config": null
  },
  "results": {
    "leaderboard|winogrande|5": {
      "acc": 0.56353591160221,
      "acc_stderr": 0.013938569465677019
    }
  },
  "versions": {
    "leaderboard|winogrande|5": 0
  },
  "config_tasks": {
    "leaderboard|winogrande": {
      "name": "winogrande",
      "prompt_function": "winogrande",
      "hf_repo": "winogrande",
      "hf_subset": "winogrande_xl",
      "metric": [
        "loglikelihood_acc"
      ],
      "hf_avail_splits": [
        "train",
        "test",
        "validation"
      ],
      "evaluation_splits": [
        "validation"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling",
      "generation_size": -1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "leaderboard"
      ],
      "original_num_docs": 1267,
      "effective_num_docs": 1267,
      "trust_dataset": true
    }
  },
  "summary_tasks": {
    "leaderboard|winogrande|5": {
      "hashes": {
        "hash_examples": "087d5d1a1afd4c7b",
        "hash_full_prompts": "29e044bcf40d6a6d",
        "hash_input_tokens": "1e4162d943d8926d",
        "hash_cont_tokens": "af83e7e371fcf4d4"
      },
      "truncated": 0,
      "non_truncated": 1267,
      "padded": 2356,
      "non_padded": 178,
      "effective_few_shots": 5.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "b9a49975cc41fab7",
      "hash_full_prompts": "2f908b2b9b5ec583",
      "hash_input_tokens": "b9aa9329c9b8f4e4",
      "hash_cont_tokens": "cf29ee9ce86b26c4"
    },
    "truncated": 0,
    "non_truncated": 1267,
    "padded": 2356,
    "non_padded": 178,
    "num_truncated_few_shots": 0
  }
}
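Once parsed, the headline metric can be read back out of the "results" block; a minimal sketch in Python, with results.json standing in for the local path returned above:

# Sketch: read the winogrande 5-shot accuracy and its standard error from the results file.
import json

with open("results.json") as f:  # placeholder local filename
    results = json.load(f)

task = results["results"]["leaderboard|winogrande|5"]
print(f"winogrande 5-shot acc: {task['acc']:.4f} +/- {task['acc_stderr']:.4f}")
# -> winogrande 5-shot acc: 0.5635 +/- 0.0139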