{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 4,
"max_samples": null,
"job_id": "",
"start_time": 503804.153125933,
"end_time": 516496.045088393,
"total_evaluation_time_secondes": "12691.891962459951",
"model_name": "mistralai/Mixtral-8x22B-Instruct-v0.1",
"model_sha": "796bc4393fd5e7e0c0ff1c44de2526419f163003",
"model_dtype": "torch.bfloat16",
"model_size": "263.69 GB",
"config": null
},
"results": {
"extended|ifeval|0": {
"prompt_level_strict_acc": 0.6654343807763401,
"prompt_level_strict_acc_stderr": 0.02030469137804561,
"inst_level_strict_acc": 0.7577937649880095,
"inst_level_strict_acc_stderr": 0.00046708648016664957,
"prompt_level_loose_acc": 0.7060998151571165,
"prompt_level_loose_acc_stderr": 0.019603612015637102,
"inst_level_loose_acc": 0.7889688249400479,
"inst_level_loose_acc_stderr": 0.00044364799222027933
},
"all": {
"prompt_level_strict_acc": 0.6654343807763401,
"prompt_level_strict_acc_stderr": 0.02030469137804561,
"inst_level_strict_acc": 0.7577937649880095,
"inst_level_strict_acc_stderr": 0.00046708648016664957,
"prompt_level_loose_acc": 0.7060998151571165,
"prompt_level_loose_acc_stderr": 0.019603612015637102,
"inst_level_loose_acc": 0.7889688249400479,
"inst_level_loose_acc_stderr": 0.00044364799222027933
}
},
"versions": {
"extended|ifeval|0": 0
},
"config_tasks": {
"extended|ifeval": {
"name": "ifeval",
"prompt_function": "ifeval_prompt",
"hf_repo": "wis-k/instruction-following-eval",
"hf_subset": "default",
"metric": [
"ifeval_metric"
],
"hf_avail_splits": [
"train"
],
"evaluation_splits": [
"train"
],
"few_shots_split": "train",
"few_shots_select": "random_sampling",
"generation_size": 1280,
"stop_sequence": [],
"output_regex": null,
"frozen": false,
"suite": [
"extended"
],
"original_num_docs": 541,
"effective_num_docs": 541,
"trust_dataset": null,
"must_remove_duplicate_docs": null
}
},
"summary_tasks": {
"extended|ifeval|0": {
"hashes": {
"hash_examples": "e99cbf567588d7c6",
"hash_full_prompts": "de8a0ef3cca868ab",
"hash_input_tokens": "ffd86ac3b437afa7",
"hash_cont_tokens": "b923ab19ef0f63c8"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"effective_few_shots": 0.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "ea046ab2c6fc5928",
"hash_full_prompts": "dca596c8a030146c",
"hash_input_tokens": "7b3b17799cb3faf3",
"hash_cont_tokens": "82b4d4604901f20b"
},
"truncated": 541,
"non_truncated": 0,
"padded": 116,
"non_padded": 425,
"num_truncated_few_shots": 0
}
}