{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 837108.9440716,
"end_time": 837855.735161022,
"total_evaluation_time_secondes": "746.7910894220695",
"model_name": "mistralai/Mistral-7B-Instruct-v0.1",
"model_sha": "73068f3702d050a2fd5aa2ca1e612e5036429398",
"model_dtype": "torch.bfloat16",
"model_size": "13.99 GB",
"config": null
},
"results": {
"extended|ifeval|0": {
"prompt_level_strict_acc": 0.3974121996303142,
"prompt_level_strict_acc_stderr": 0.021058812847519866,
"inst_level_strict_acc": 0.49760191846522783,
"inst_level_strict_acc_stderr": 0.0005390891114432318,
"prompt_level_loose_acc": 0.4269870609981516,
"prompt_level_loose_acc_stderr": 0.021285933050061313,
"inst_level_loose_acc": 0.5323741007194245,
"inst_level_loose_acc_stderr": 0.0005378503905090047
},
"all": {
"prompt_level_strict_acc": 0.3974121996303142,
"prompt_level_strict_acc_stderr": 0.021058812847519866,
"inst_level_strict_acc": 0.49760191846522783,
"inst_level_strict_acc_stderr": 0.0005390891114432318,
"prompt_level_loose_acc": 0.4269870609981516,
"prompt_level_loose_acc_stderr": 0.021285933050061313,
"inst_level_loose_acc": 0.5323741007194245,
"inst_level_loose_acc_stderr": 0.0005378503905090047
}
},
"versions": {
"extended|ifeval|0": 0
},
"config_tasks": {
"extended|ifeval": {
"name": "ifeval",
"prompt_function": "ifeval_prompt",
"hf_repo": "wis-k/instruction-following-eval",
"hf_subset": "default",
"metric": [
"ifeval_metric"
],
"hf_avail_splits": [
"train"
],
"evaluation_splits": [
"train"
],
"few_shots_split": "train",
"few_shots_select": "random_sampling",
"generation_size": 1280,
"stop_sequence": [],
"output_regex": null,
"frozen": false,
"suite": [
"extended"
],
"original_num_docs": 541,
"effective_num_docs": 541,
"trust_dataset": null,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"extended|ifeval|0": {
"hashes": {
"hash_examples": "e99cbf567588d7c6",
"hash_full_prompts": "50f8a8f5a08077cd",
"hash_input_tokens": "fb1ed90207cf652b",
"hash_cont_tokens": "86efaf3c9d7e9562"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "ea046ab2c6fc5928",
"hash_full_prompts": "9c04db679f2c9132",
"hash_input_tokens": "af9a63ef7d2de8ba",
"hash_cont_tokens": "8a8e24e6d741211b"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"num_truncated_few_shots": 0
}
}