{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 3697122.849731327,
"end_time": 3697193.714011678,
"total_evaluation_time_secondes": "70.86428035097197",
"model_name": "mistralai/Mistral-7B-Instruct-v0.2",
"model_sha": "cf47bb3e18fe41a5351bc36eef76e9c900847c89",
"model_dtype": "torch.bfloat16",
"model_size": "13.99 GB",
"config": null
},
"results": {
"lighteval|winogrande|5": {
"acc": 0.6882399368587214,
"acc_stderr": 0.013018571197638535
}
},
"versions": {
"lighteval|winogrande|5": 0
},
"config_tasks": {
"lighteval|winogrande": {
"name": "winogrande",
"prompt_function": "winogrande",
"hf_repo": "winogrande",
"hf_subset": "winogrande_xl",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"train",
"test",
"validation"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1267,
"effective_num_docs": 1267
}
},
"summary_tasks": {
"lighteval|winogrande|5": {
"hashes": {
"hash_examples": "087d5d1a1afd4c7b",
"hash_full_prompts": "21d1cf75825bfc31",
"hash_input_tokens": "74087c8eca355022",
"hash_cont_tokens": "0dbce81de89a82c0"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2391,
"non_padded": 143,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b9a49975cc41fab7",
"hash_full_prompts": "9056d6e5dbbb5d94",
"hash_input_tokens": "87041500b927b70d",
"hash_cont_tokens": "366469c3d3fbabf2"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2391,
"non_padded": 143,
"num_truncated_few_shots": 0
}
}