open-r1-eval-leaderboard
/
eval_results
/HuggingFaceH4
/qwen-1.5-1.8b-dpo
/v0.0
/ifeval
/results_2024-03-14T12-34-18.153949.json

edbeeching
HF staff
Upload eval_results/HuggingFaceH4/qwen-1.5-1.8b-dpo/v0.0/ifeval/results_2024-03-14T12-34-18.153949.json with huggingface_hub
44af62b
verified
{
  "config_general": {
    "lighteval_sha": "?",
    "num_fewshot_seeds": 1,
    "override_batch_size": 4,
    "max_samples": null,
    "job_id": "",
    "start_time": 854012.543224792,
    "end_time": 854534.871504916,
    "total_evaluation_time_secondes": "522.3282801239984",
    "model_name": "HuggingFaceH4/qwen-1.5-1.8b-dpo",
    "model_sha": "7e0ea69dc7749ce082a9f0ab72b09bb2baabe757",
    "model_dtype": "torch.bfloat16",
    "model_size": "3.79 GB",
    "config": null
  },
  "results": {
    "custom|ifeval|0": {
      "prompt_level_strict_acc": 0.1756007393715342,
      "prompt_level_strict_acc_stderr": 0.016373257312057913,
      "inst_level_strict_acc": 0.30335731414868106,
      "inst_level_strict_acc_stderr": 0.0004875489221993535,
      "prompt_level_loose_acc": 0.19593345656192238,
      "prompt_level_loose_acc_stderr": 0.01708061155345544,
      "inst_level_loose_acc": 0.3249400479616307,
      "inst_level_loose_acc_stderr": 0.0004899322156066653
    }
  },
  "versions": {
    "custom|ifeval|0": 0
  },
  "config_tasks": {
    "custom|ifeval": {
      "name": "ifeval",
      "prompt_function": "ifeval_prompt",
      "hf_repo": "wis-k/instruction-following-eval",
      "hf_subset": "default",
      "metric": [
        "ifeval_metric"
      ],
      "hf_avail_splits": [
        "train"
      ],
      "evaluation_splits": [
        "train"
      ],
      "few_shots_split": "train",
      "few_shots_select": "random_sampling",
      "generation_size": 1280,
      "stop_sequence": [],
      "output_regex": null,
      "frozen": false,
      "suite": [
        "custom"
      ],
      "original_num_docs": 541,
      "effective_num_docs": 541
    }
  },
  "summary_tasks": {
    "custom|ifeval|0": {
      "hashes": {
        "hash_examples": "e99cbf567588d7c6",
        "hash_full_prompts": "7ea7bf2a8edba8f4",
        "hash_input_tokens": "406f216e2e739d61",
        "hash_cont_tokens": "7f59d0c3b248a823"
      },
      "truncated": 0,
      "non_truncated": 541,
      "padded": 0,
      "non_padded": 541,
      "effective_few_shots": 0.0,
      "num_truncated_few_shots": 0
    }
  },
  "summary_general": {
    "hashes": {
      "hash_examples": "ea046ab2c6fc5928",
      "hash_full_prompts": "45f8422f6ad2da79",
      "hash_input_tokens": "7be1d930c9579e40",
      "hash_cont_tokens": "8daa303b6e77104b"
    },
    "truncated": 0,
    "non_truncated": 541,
    "padded": 0,
    "non_padded": 541,
    "num_truncated_few_shots": 0
  }
}