open-r1-eval-leaderboard
/
eval_results
/HuggingFaceH4
/Qwen1.5-1.8B-Chat-dpo
/v0.2
/arc
/results_2024-03-22T14-26-45.706403.json

edbeeching
HF staff
Upload eval_results/HuggingFaceH4/Qwen1.5-1.8B-Chat-dpo/v0.2/arc/results_2024-03-22T14-26-45.706403.json with huggingface_hub
05ccf42
verified
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null,
    "job_id": "",
    "start_time": 1550536.338015852,
    "end_time": 1550625.807493872,
    "total_evaluation_time_secondes": "89.46947801997885",
    "model_name": "HuggingFaceH4/Qwen1.5-1.8B-Chat-dpo",
    "model_sha": "18082ab2275b8f692849770c11a458ac664d216e",
    "model_dtype": "torch.bfloat16",
    "model_size": "3.8 GB",
    "config": null
  },
  "results": {
    "leaderboard|arc:challenge|25": {
      "acc": 0.3728668941979522,
      "acc_stderr": 0.014131176760131163,
      "acc_norm": 0.37627986348122866,
      "acc_norm_stderr": 0.014157022555407166
    }
  },
  "versions": {
    "leaderboard|arc:challenge|25": 0
  },
  "config_tasks": {
    "leaderboard|arc:challenge": {
      "name": "arc:challenge",
      "prompt_function": "arc",
      "hf_repo": "ai2_arc",
      "hf_subset": "ARC-Challenge",
      "metric": [
        "loglikelihood_acc",
        "loglikelihood_acc_norm_nospace"
      ],
      "hf_avail_splits": [
        "train",
        "test"
      ],
      "evaluation_splits": [
        "test"
      ],
      "few_shots_split": null,
      "few_shots_select": "random_sampling_from_train",
      "generation_size": 1,
      "stop_sequence": [
        "\n"
      ],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "leaderboard",
        "arc"
      ],
      "original_num_docs": 1172,
      "effective_num_docs": 1172,
      "trust_dataset": true
    }
  },
  "summary_tasks": {
    "leaderboard|arc:challenge|25": {
      "hashes": {
        "hash_examples": "17b0cae357c0259e",
        "hash_full_prompts": "4613138cb84a1c53",
        "hash_input_tokens": "85ecaa299a6a917a",
        "hash_cont_tokens": "da3689055cb5fa28"
      },
      "truncated": 0,
      "non_truncated": 1172,
      "padded": 4651,
      "non_padded": 36,
      "effective_few_shots": 25.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "aaa6929c6d3771fb",
      "hash_full_prompts": "de0ea2e1cc95d72a",
      "hash_input_tokens": "549aeedee8982096",
      "hash_cont_tokens": "da91173bce47858f"
    },
    "truncated": 0,
    "non_truncated": 1172,
    "padded": 4651,
    "non_padded": 36,
    "num_truncated_few_shots": 0
  }
}