Upload eval_results/mistralai/Mistral-7B-Instruct-v0.2/main/hellaswag/results_2024-03-02T15-40-34.144669.json with huggingface_hub
eval_results/mistralai/Mistral-7B-Instruct-v0.2/main/hellaswag/results_2024-03-02T15-40-34.144669.json
ADDED
@@ -0,0 +1,90 @@
+{
+    "config_general": {
+        "lighteval_sha": "?",
+        "num_fewshot_seeds": 1,
+        "override_batch_size": 1,
+        "max_samples": null,
+        "job_id": "",
+        "start_time": 2346586.58563442,
+        "end_time": 2347048.295203403,
+        "total_evaluation_time_secondes": "461.7095689829439",
+        "model_name": "mistralai/Mistral-7B-Instruct-v0.2",
+        "model_sha": "cf47bb3e18fe41a5351bc36eef76e9c900847c89",
+        "model_dtype": "torch.bfloat16",
+        "model_size": "13.99 GB",
+        "config": null
+    },
+    "results": {
+        "lighteval|hellaswag|10": {
+            "acc": 0.6746664011153157,
+            "acc_stderr": 0.004675418774314236,
+            "acc_norm": 0.8344951204939255,
+            "acc_norm_stderr": 0.003708760752685524
+        }
+    },
+    "versions": {
+        "lighteval|hellaswag|10": 0
+    },
+    "config_tasks": {
+        "lighteval|hellaswag": {
+            "name": "hellaswag",
+            "prompt_function": "hellaswag_harness",
+            "hf_repo": "hellaswag",
+            "hf_subset": "default",
+            "metric": [
+                "loglikelihood_acc",
+                "loglikelihood_acc_norm"
+            ],
+            "hf_avail_splits": [
+                "train",
+                "test",
+                "validation"
+            ],
+            "evaluation_splits": [
+                "validation"
+            ],
+            "few_shots_split": null,
+            "few_shots_select": "random_sampling_from_train",
+            "generation_size": -1,
+            "stop_sequence": [
+                "\n"
+            ],
+            "output_regex": null,
+            "frozen": false,
+            "suite": [
+                "lighteval"
+            ],
+            "original_num_docs": 10042,
+            "effective_num_docs": 10042
+        }
+    },
+    "summary_tasks": {
+        "lighteval|hellaswag|10": {
+            "hashes": {
+                "hash_examples": "31985c805c3a737e",
+                "hash_full_prompts": "a3c5cf19ab983927",
+                "hash_input_tokens": "f340cb1674dbe5ed",
+                "hash_cont_tokens": "cf1fd30525a7ce71"
+            },
+            "truncated": 0,
+            "non_truncated": 10042,
+            "padded": 40011,
+            "non_padded": 157,
+            "effective_few_shots": 10.0,
+            "num_truncated_few_shots": 0
+        }
+    },
+    "summary_general": {
+        "hashes": {
+            "hash_examples": "63bc2cf8bae03fbc",
+            "hash_full_prompts": "af0716edcec0366b",
+            "hash_input_tokens": "4ac9ac0520577724",
+            "hash_cont_tokens": "605832a579f190e3"
+        },
+        "truncated": 0,
+        "non_truncated": 10042,
+        "padded": 40011,
+        "non_padded": 157,
+        "num_truncated_few_shots": 0
+    }
+}
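
For context, a minimal sketch of how a results file like this one is typically pushed to the Hub with the `huggingface_hub` client. The target `repo_id` and `repo_type` below are illustrative assumptions, not taken from this commit; the commit only records the resulting file path, not the upload call that produced it.

```python
from huggingface_hub import HfApi

# Sketch of the upload step (assumptions: repo_id and repo_type are
# hypothetical; this commit only shows the file that was added).
api = HfApi()
api.upload_file(
    path_or_fileobj="results_2024-03-02T15-40-34.144669.json",
    path_in_repo=(
        "eval_results/mistralai/Mistral-7B-Instruct-v0.2/main/hellaswag/"
        "results_2024-03-02T15-40-34.144669.json"
    ),
    repo_id="my-org/eval-results",  # hypothetical target repository
    repo_type="dataset",            # assumption: eval results usually live in a dataset repo
    commit_message="Upload eval_results/... with huggingface_hub",
)
```

`upload_file` creates a single commit containing the file, which is what this page shows: one added JSON with the lighteval config, the HellaSwag scores, and the run-summary hashes.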