lewtun HF staff committed on
Commit
ef306ba
·
verified ·
1 Parent(s): 1f85815

Upload eval_results/meta-llama/Llama-2-13b-chat-hf/main/truthfulqa/results_2024-03-04T21-46-13.700082.json with huggingface_hub

Browse files
eval_results/meta-llama/Llama-2-13b-chat-hf/main/truthfulqa/results_2024-03-04T21-46-13.700082.json ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "config_general": {
3
+ "lighteval_sha": "?",
4
+ "num_fewshot_seeds": 1,
5
+ "override_batch_size": 1,
6
+ "max_samples": null,
7
+ "job_id": "",
8
+ "start_time": 1534588.573615086,
9
+ "end_time": 1534753.53926857,
10
+ "total_evaluation_time_secondes": "164.96565348398872",
11
+ "model_name": "meta-llama/Llama-2-13b-chat-hf",
12
+ "model_sha": "c2f3ec81aac798ae26dcc57799a994dfbf521496",
13
+ "model_dtype": "torch.bfloat16",
14
+ "model_size": "24.32 GB",
15
+ "config": null
16
+ },
17
+ "results": {
18
+ "lighteval|truthfulqa:mc|0": {
19
+ "truthfulqa_mc1": 0.2692778457772338,
20
+ "truthfulqa_mc1_stderr": 0.01552856663708729,
21
+ "truthfulqa_mc2": 0.42762212617097595,
22
+ "truthfulqa_mc2_stderr": 0.015391942715143636
23
+ }
24
+ },
25
+ "versions": {
26
+ "lighteval|truthfulqa:mc|0": 0
27
+ },
28
+ "config_tasks": {
29
+ "lighteval|truthfulqa:mc": {
30
+ "name": "truthfulqa:mc",
31
+ "prompt_function": "truthful_qa_multiple_choice",
32
+ "hf_repo": "truthful_qa",
33
+ "hf_subset": "multiple_choice",
34
+ "metric": [
35
+ "truthfulqa_mc_metrics"
36
+ ],
37
+ "hf_avail_splits": [
38
+ "validation"
39
+ ],
40
+ "evaluation_splits": [
41
+ "validation"
42
+ ],
43
+ "few_shots_split": null,
44
+ "few_shots_select": null,
45
+ "generation_size": -1,
46
+ "stop_sequence": [
47
+ "\n"
48
+ ],
49
+ "output_regex": null,
50
+ "frozen": false,
51
+ "suite": [
52
+ "lighteval"
53
+ ],
54
+ "original_num_docs": 817,
55
+ "effective_num_docs": 817
56
+ }
57
+ },
58
+ "summary_tasks": {
59
+ "lighteval|truthfulqa:mc|0": {
60
+ "hashes": {
61
+ "hash_examples": "36a6d90e75d92d4a",
62
+ "hash_full_prompts": "bc19700dcc192702",
63
+ "hash_input_tokens": "691231bfc79d7533",
64
+ "hash_cont_tokens": "f5da56a132aab151"
65
+ },
66
+ "truncated": 0,
67
+ "non_truncated": 817,
68
+ "padded": 9623,
69
+ "non_padded": 373,
70
+ "effective_few_shots": 0.0,
71
+ "num_truncated_few_shots": 0
72
+ }
73
+ },
74
+ "summary_general": {
75
+ "hashes": {
76
+ "hash_examples": "aed1dfc67e53d0f2",
77
+ "hash_full_prompts": "046a77ce3ce1d5d3",
78
+ "hash_input_tokens": "19c9780a6766a7b6",
79
+ "hash_cont_tokens": "52845ca5a27c2b40"
80
+ },
81
+ "truncated": 0,
82
+ "non_truncated": 817,
83
+ "padded": 9623,
84
+ "non_padded": 373,
85
+ "num_truncated_few_shots": 0
86
+ }
87
+ }