lewtun committed
Commit 89286eb · verified · 1 Parent(s): 994fa3e

Upload eval_results/alignment-handbook/zephyr-2b-gemma-sft-v2/main/truthfulqa/results_2024-03-05T10-48-57.914944.json with huggingface_hub
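
For reference, a file like this is typically pushed with the `huggingface_hub` client. A minimal sketch, assuming the results live in a dataset repo; "your-org/eval-results" is a placeholder repo id (the target repo is not named in this commit view), and the local filename is taken from the path in the commit message:

# Sketch: upload a single results file with huggingface_hub.
# "your-org/eval-results" and repo_type="dataset" are assumptions.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results_2024-03-05T10-48-57.914944.json",  # local copy
    path_in_repo=(
        "eval_results/alignment-handbook/zephyr-2b-gemma-sft-v2/"
        "main/truthfulqa/results_2024-03-05T10-48-57.914944.json"
    ),
    repo_id="your-org/eval-results",  # placeholder, not the actual repo
    repo_type="dataset",              # assumption about where eval results are stored
    commit_message=(
        "Upload eval_results/alignment-handbook/zephyr-2b-gemma-sft-v2/"
        "main/truthfulqa/results_2024-03-05T10-48-57.914944.json with huggingface_hub"
    ),
)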

eval_results/alignment-handbook/zephyr-2b-gemma-sft-v2/main/truthfulqa/results_2024-03-05T10-48-57.914944.json ADDED
@@ -0,0 +1,87 @@
+ {
+     "config_general": {
+         "lighteval_sha": "?",
+         "num_fewshot_seeds": 1,
+         "override_batch_size": 1,
+         "max_samples": null,
+         "job_id": "",
+         "start_time": 4285636.880805286,
+         "end_time": 4285712.941620452,
+         "total_evaluation_time_secondes": "76.060815166682",
+         "model_name": "alignment-handbook/zephyr-2b-gemma-sft-v2",
+         "model_sha": "6b4a3a609c1ff3a06dbbfdc8e6f6c02000debffd",
+         "model_dtype": "torch.bfloat16",
+         "model_size": "4.68 GB",
+         "config": null
+     },
+     "results": {
+         "lighteval|truthfulqa:mc|0": {
+             "truthfulqa_mc1": 0.3084455324357405,
+             "truthfulqa_mc1_stderr": 0.01616803938315687,
+             "truthfulqa_mc2": 0.46456518136957553,
+             "truthfulqa_mc2_stderr": 0.015046687722984953
+         }
+     },
+     "versions": {
+         "lighteval|truthfulqa:mc|0": 0
+     },
+     "config_tasks": {
+         "lighteval|truthfulqa:mc": {
+             "name": "truthfulqa:mc",
+             "prompt_function": "truthful_qa_multiple_choice",
+             "hf_repo": "truthful_qa",
+             "hf_subset": "multiple_choice",
+             "metric": [
+                 "truthfulqa_mc_metrics"
+             ],
+             "hf_avail_splits": [
+                 "validation"
+             ],
+             "evaluation_splits": [
+                 "validation"
+             ],
+             "few_shots_split": null,
+             "few_shots_select": null,
+             "generation_size": -1,
+             "stop_sequence": [
+                 "\n"
+             ],
+             "output_regex": null,
+             "frozen": false,
+             "suite": [
+                 "lighteval"
+             ],
+             "original_num_docs": 817,
+             "effective_num_docs": 817
+         }
+     },
+     "summary_tasks": {
+         "lighteval|truthfulqa:mc|0": {
+             "hashes": {
+                 "hash_examples": "36a6d90e75d92d4a",
+                 "hash_full_prompts": "17e9d0dc9f923ba3",
+                 "hash_input_tokens": "d813d82926b925e7",
+                 "hash_cont_tokens": "856a0c14e73e2579"
+             },
+             "truncated": 0,
+             "non_truncated": 817,
+             "padded": 9512,
+             "non_padded": 484,
+             "effective_few_shots": 0.0,
+             "num_truncated_few_shots": 0
+         }
+     },
+     "summary_general": {
+         "hashes": {
+             "hash_examples": "aed1dfc67e53d0f2",
+             "hash_full_prompts": "81a2e5a97bc8b7e3",
+             "hash_input_tokens": "20e0fd0de1485030",
+             "hash_cont_tokens": "05a6e7c601fa09e0"
+         },
+         "truncated": 0,
+         "non_truncated": 817,
+         "padded": 9512,
+         "non_padded": 484,
+         "num_truncated_few_shots": 0
+     }
+ }
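
The headline metrics in the file above (truthfulqa_mc1 and truthfulqa_mc2 with their standard errors) can be read back with the standard library. A minimal sketch, assuming the file has been downloaded locally under the name shown in the diff:

# Sketch: read the TruthfulQA scores out of the results file shown above.
import json

with open("results_2024-03-05T10-48-57.914944.json") as f:
    results = json.load(f)

# Task key as it appears in the "results" section of the file.
task = results["results"]["lighteval|truthfulqa:mc|0"]
print(f"truthfulqa_mc1: {task['truthfulqa_mc1']:.4f} ± {task['truthfulqa_mc1_stderr']:.4f}")
print(f"truthfulqa_mc2: {task['truthfulqa_mc2']:.4f} ± {task['truthfulqa_mc2_stderr']:.4f}")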