lewtun (HF staff) committed (verified)
Commit 4fa4e91 · 1 parent: a9fd15a

Upload eval_results/alignment-handbook/zephyr-2b-gemma-sft-hermes-epoch-1-block-4096/main/hellaswag/results_2024-03-05T18-09-24.149678.json with huggingface_hub
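
The commit message notes the file was pushed with huggingface_hub. As a minimal sketch (not the exact command used here), an upload like this can be done with HfApi.upload_file; the local filename, target repo_id, and repo_type below are placeholders/assumptions, not values taken from this page.

# Sketch: pushing a results JSON with huggingface_hub (placeholders marked inline).
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results_2024-03-05T18-09-24.149678.json",  # local file (placeholder path)
    path_in_repo="eval_results/alignment-handbook/zephyr-2b-gemma-sft-hermes-epoch-1-block-4096/main/hellaswag/results_2024-03-05T18-09-24.149678.json",
    repo_id="<target-repo-id>",   # destination repo is not named on this page
    repo_type="dataset",          # assumption: results are stored in a dataset repo
    commit_message="Upload eval results with huggingface_hub",
)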

eval_results/alignment-handbook/zephyr-2b-gemma-sft-hermes-epoch-1-block-4096/main/hellaswag/results_2024-03-05T18-09-24.149678.json ADDED
@@ -0,0 +1,90 @@
+ {
+   "config_general": {
+     "lighteval_sha": "?",
+     "num_fewshot_seeds": 1,
+     "override_batch_size": 1,
+     "max_samples": null,
+     "job_id": "",
+     "start_time": 1922708.079670274,
+     "end_time": 1923138.890325496,
+     "total_evaluation_time_secondes": "430.81065522204153",
+     "model_name": "alignment-handbook/zephyr-2b-gemma-sft-hermes-epoch-1-block-4096",
+     "model_sha": "51b710b40b9f64b4613fc0cb97c94e8c6b2c07cb",
+     "model_dtype": "torch.bfloat16",
+     "model_size": "4.68 GB",
+     "config": null
+   },
+   "results": {
+     "lighteval|hellaswag|10": {
+       "acc": 0.5204142601075483,
+       "acc_stderr": 0.004985620773683431,
+       "acc_norm": 0.6933877713602868,
+       "acc_norm_stderr": 0.004601446124041567
+     }
+   },
+   "versions": {
+     "lighteval|hellaswag|10": 0
+   },
+   "config_tasks": {
+     "lighteval|hellaswag": {
+       "name": "hellaswag",
+       "prompt_function": "hellaswag_harness",
+       "hf_repo": "hellaswag",
+       "hf_subset": "default",
+       "metric": [
+         "loglikelihood_acc",
+         "loglikelihood_acc_norm"
+       ],
+       "hf_avail_splits": [
+         "train",
+         "test",
+         "validation"
+       ],
+       "evaluation_splits": [
+         "validation"
+       ],
+       "few_shots_split": null,
+       "few_shots_select": "random_sampling_from_train",
+       "generation_size": -1,
+       "stop_sequence": [
+         "\n"
+       ],
+       "output_regex": null,
+       "frozen": false,
+       "suite": [
+         "lighteval"
+       ],
+       "original_num_docs": 10042,
+       "effective_num_docs": 10042
+     }
+   },
+   "summary_tasks": {
+     "lighteval|hellaswag|10": {
+       "hashes": {
+         "hash_examples": "31985c805c3a737e",
+         "hash_full_prompts": "451e22e8c3aaeeb6",
+         "hash_input_tokens": "6588539749c9d962",
+         "hash_cont_tokens": "6232829aec74d42d"
+       },
+       "truncated": 0,
+       "non_truncated": 10042,
+       "padded": 39848,
+       "non_padded": 320,
+       "effective_few_shots": 10.0,
+       "num_truncated_few_shots": 0
+     }
+   },
+   "summary_general": {
+     "hashes": {
+       "hash_examples": "63bc2cf8bae03fbc",
+       "hash_full_prompts": "b2452b7405d0326a",
+       "hash_input_tokens": "a6c61cc900d3365f",
+       "hash_cont_tokens": "c2b17126697982c3"
+     },
+     "truncated": 0,
+     "non_truncated": 10042,
+     "padded": 39848,
+     "non_padded": 320,
+     "num_truncated_few_shots": 0
+   }
+ }
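
For reference, a minimal sketch of reading the headline metrics back out of this file with the standard json module; the local filename assumes the JSON has been downloaded next to the script.

import json

# Load the uploaded results file (path is a local placeholder).
with open("results_2024-03-05T18-09-24.149678.json") as f:
    report = json.load(f)

# The metrics live under results -> "lighteval|hellaswag|10", as shown in the diff above.
scores = report["results"]["lighteval|hellaswag|10"]
print(f"acc      = {scores['acc']:.4f} ± {scores['acc_stderr']:.4f}")
print(f"acc_norm = {scores['acc_norm']:.4f} ± {scores['acc_norm_stderr']:.4f}")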