open-r1-eval-leaderboard/eval_results/HuggingFaceH4/mistral-7b-odpo/v1.1/truthfulqa/results_2024-03-27T22-54-58.436239.json
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1126251.335380151,
    "end_time": 1126371.685916148,
    "total_evaluation_time_secondes": "120.35053599695675",
    "model_name": "HuggingFaceH4/mistral-7b-odpo",
    "model_sha": "639db5fc8e84f3f3d472d04032132c3b914a835d",
    "model_dtype": "torch.bfloat16",
    "model_size": "13.99 GB",
    "config": null
  },
  "results": {
    "leaderboard|truthfulqa:mc|0": {
      "truthfulqa_mc1": 0.40024479804161567,
      "truthfulqa_mc1_stderr": 0.017151605555749138,
      "truthfulqa_mc2": 0.5626348120796845,
      "truthfulqa_mc2_stderr": 0.016408586637166774
    }
  },
  "versions": {
    "leaderboard|truthfulqa:mc|0": 0
  },
  "config_tasks": {
    "leaderboard|truthfulqa:mc": {
      "name": "truthfulqa:mc",
      "prompt_function": "truthful_qa_multiple_choice",
      "hf_repo": "truthful_qa",
      "hf_subset": "multiple_choice",
      "metric": [
        "truthfulqa_mc_metrics"
      ],
      "hf_avail_splits": [
        "validation"
      ],
      "evaluation_splits": [
        "validation"
      ],
      "few_shots_split": null,
      "few_shots_select": null,
      "generation_size": -1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "leaderboard"
      ],
      "original_num_docs": 817,
      "effective_num_docs": 817,
      "trust_dataset": true
    }
  },
  "summary_tasks": {
    "leaderboard|truthfulqa:mc|0": {
      "hashes": {
        "hash_examples": "36a6d90e75d92d4a",
        "hash_full_prompts": "87686035f32c79c9",
        "hash_input_tokens": "e6efb4cba0ced254",
        "hash_cont_tokens": "9ee6fa47d927a815"
      },
      "truncated": 0,
      "non_truncated": 817,
      "padded": 9429,
      "non_padded": 567,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "aed1dfc67e53d0f2",
      "hash_full_prompts": "3fe6a628d5ea8051",
      "hash_input_tokens": "910f5a1de3b8f8fe",
      "hash_cont_tokens": "ce945fa45483fa47"
    },
    "truncated": 0,
    "non_truncated": 817,
    "padded": 9429,
    "non_padded": 567,
    "num_truncated_few_shots": 0
  }
}
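
For reference, a minimal sketch of pulling the headline TruthfulQA scores out of a results file with this layout, using only the Python standard library. The local filename `results.json` is an assumption for illustration; substitute the path you downloaded the file to.

```python
import json

# Load a lighteval results file shaped like the one above
# (the local path "results.json" is assumed for illustration).
with open("results.json") as f:
    results = json.load(f)

# Per-task scores live under "results", keyed as "<suite>|<task>|<num_fewshot>".
task = results["results"]["leaderboard|truthfulqa:mc|0"]

print(f"TruthfulQA MC1: {task['truthfulqa_mc1']:.4f} ± {task['truthfulqa_mc1_stderr']:.4f}")
print(f"TruthfulQA MC2: {task['truthfulqa_mc2']:.4f} ± {task['truthfulqa_mc2_stderr']:.4f}")
```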