{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 842266.978467673,
"end_time": 843063.223526189,
"total_evaluation_time_secondes": "796.2450585160404",
"model_name": "kaist-ai/mistral-orpo-capybara-7k",
"model_sha": "24c1172060658a1923c9b454796857e2cc59fbeb",
"model_dtype": "torch.bfloat16",
"model_size": "13.99 GB",
"config": null
},
"results": {
"extended|ifeval|0": {
"prompt_level_strict_acc": 0.4898336414048059,
"prompt_level_strict_acc_stderr": 0.021512126001350784,
"inst_level_strict_acc": 0.6067146282973621,
"inst_level_strict_acc_stderr": 0.0005046766559749995,
"prompt_level_loose_acc": 0.5360443622920518,
"prompt_level_loose_acc_stderr": 0.021460592823736725,
"inst_level_loose_acc": 0.6486810551558753,
"inst_level_loose_acc_stderr": 0.0005001448958509454
},
"all": {
"prompt_level_strict_acc": 0.4898336414048059,
"prompt_level_strict_acc_stderr": 0.021512126001350784,
"inst_level_strict_acc": 0.6067146282973621,
"inst_level_strict_acc_stderr": 0.0005046766559749995,
"prompt_level_loose_acc": 0.5360443622920518,
"prompt_level_loose_acc_stderr": 0.021460592823736725,
"inst_level_loose_acc": 0.6486810551558753,
"inst_level_loose_acc_stderr": 0.0005001448958509454
}
},
"versions": {
"extended|ifeval|0": 0
},
"config_tasks": {
"extended|ifeval": {
"name": "ifeval",
"prompt_function": "ifeval_prompt",
"hf_repo": "wis-k/instruction-following-eval",
"hf_subset": "default",
"metric": [
"ifeval_metric"
],
"hf_avail_splits": [
"train"
],
"evaluation_splits": [
"train"
],
"few_shots_split": "train",
"few_shots_select": "random_sampling",
"generation_size": 1280,
"stop_sequence": [],
"output_regex": null,
"frozen": false,
"suite": [
"extended"
],
"original_num_docs": 541,
"effective_num_docs": 541,
"trust_dataset": null,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"extended|ifeval|0": {
"hashes": {
"hash_examples": "e99cbf567588d7c6",
"hash_full_prompts": "46932ca113ba6b0c",
"hash_input_tokens": "8b2e0300fbfb70bc",
"hash_cont_tokens": "2b91d65987e106bf"
},
"truncated": 541,
"non_truncated": 0,
"padded": 123,
"non_padded": 418,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "ea046ab2c6fc5928",
"hash_full_prompts": "8ed52f7b74eb0590",
"hash_input_tokens": "9f4b7e9e7ff95d4a",
"hash_cont_tokens": "36643007d3407fd5"
},
"truncated": 541,
"non_truncated": 0,
"padded": 123,
"non_padded": 418,
"num_truncated_few_shots": 0
}
}