{
"config_general": {
"lighteval_sha": "?",
"num_fewshot_seeds": 1,
"override_batch_size": 1,
"max_samples": null,
"job_id": "",
"start_time": 2528263.841499473,
"end_time": 2528617.22314353,
"total_evaluation_time_secondes": "353.38164405710995",
"model_name": "NousResearch/Nous-Hermes-2-Yi-34B",
"model_sha": "fcb0a8847e76aea14aba9aa44009d4418ad7c18f",
"model_dtype": "torch.bfloat16",
"model_size": "64.17 GB",
"config": null
},
"results": {
"lighteval|winogrande|5": {
"acc": 0.8058405682715075,
"acc_stderr": 0.01111698339239267
}
},
"versions": {
"lighteval|winogrande|5": 0
},
"config_tasks": {
"lighteval|winogrande": {
"name": "winogrande",
"prompt_function": "winogrande",
"hf_repo": "winogrande",
"hf_subset": "winogrande_xl",
"metric": [
"loglikelihood_acc"
],
"hf_avail_splits": [
"train",
"test",
"validation"
],
"evaluation_splits": [
"validation"
],
"few_shots_split": null,
"few_shots_select": "random_sampling",
"generation_size": -1,
"stop_sequence": [
"\n"
],
"output_regex": null,
"frozen": false,
"suite": [
"lighteval"
],
"original_num_docs": 1267,
"effective_num_docs": 1267
}
},
"summary_tasks": {
"lighteval|winogrande|5": {
"hashes": {
"hash_examples": "087d5d1a1afd4c7b",
"hash_full_prompts": "29e044bcf40d6a6d",
"hash_input_tokens": "40a1a38e49b4bbf6",
"hash_cont_tokens": "ce554afdcfed3691"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2289,
"non_padded": 245,
"effective_few_shots": 5.0,
"num_truncated_few_shots": 0
}
},
"summary_general": {
"hashes": {
"hash_examples": "b9a49975cc41fab7",
"hash_full_prompts": "2f908b2b9b5ec583",
"hash_input_tokens": "47df941b8db9c47a",
"hash_cont_tokens": "8582fd788bb8f289"
},
"truncated": 0,
"non_truncated": 1267,
"padded": 2289,
"non_padded": 245,
"num_truncated_few_shots": 0
}
}