lewtun committed (verified)
Commit 9199f75 · 1 Parent(s): 47bb7d4

Upload eval_results/orpo-explorers/argilla-mistral-orpo-OpenHermesPreferences-50k-beta-0.2/main/ifeval/results_2024-05-07T20-50-02.215187.json with huggingface_hub
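For reference, below is a minimal sketch of how a results file like this could be pushed with the huggingface_hub client. The local filename, target repo_id and repo_type are placeholders/assumptions, not values confirmed by this commit view.

# Minimal upload sketch (assumptions marked inline).
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    # assumption: local JSON produced by the eval run
    path_or_fileobj="results_2024-05-07T20-50-02.215187.json",
    # path inside the repo, matching the file added in this commit
    path_in_repo=(
        "eval_results/orpo-explorers/argilla-mistral-orpo-OpenHermesPreferences-50k-beta-0.2/"
        "main/ifeval/results_2024-05-07T20-50-02.215187.json"
    ),
    repo_id="<results-repo>",   # assumption: the destination repo is not shown on this page
    repo_type="dataset",        # assumption: eval results are commonly stored in a dataset repo
    commit_message="Upload ifeval results with huggingface_hub",
)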

eval_results/orpo-explorers/argilla-mistral-orpo-OpenHermesPreferences-50k-beta-0.2/main/ifeval/results_2024-05-07T20-50-02.215187.json ADDED
@@ -0,0 +1,103 @@
+ {
+     "config_general": {
+         "lighteval_sha": "?",
+         "num_fewshot_seeds": 1,
+         "override_batch_size": 4,
+         "max_samples": null,
+         "job_id": "",
+         "start_time": 510018.66643804,
+         "end_time": 510641.070071069,
+         "total_evaluation_time_secondes": "622.4036330290255",
+         "model_name": "orpo-explorers/argilla-mistral-orpo-OpenHermesPreferences-50k-beta-0.2",
+         "model_sha": "8e456b857961e00f2e1b076756e84b97a3b88ea5",
+         "model_dtype": "torch.bfloat16",
+         "model_size": "13.99 GB",
+         "config": null
+     },
+     "results": {
+         "extended|ifeval|0": {
+             "prompt_level_strict_acc": 0.27911275415896486,
+             "prompt_level_strict_acc_stderr": 0.019303080958497216,
+             "inst_level_strict_acc": 0.39568345323741005,
+             "inst_level_strict_acc_stderr": 0.0005152488668072387,
+             "prompt_level_loose_acc": 0.3068391866913124,
+             "prompt_level_loose_acc_stderr": 0.019846116338147046,
+             "inst_level_loose_acc": 0.420863309352518,
+             "inst_level_loose_acc_stderr": 0.0005182508497156708
+         },
+         "all": {
+             "prompt_level_strict_acc": 0.27911275415896486,
+             "prompt_level_strict_acc_stderr": 0.019303080958497216,
+             "inst_level_strict_acc": 0.39568345323741005,
+             "inst_level_strict_acc_stderr": 0.0005152488668072387,
+             "prompt_level_loose_acc": 0.3068391866913124,
+             "prompt_level_loose_acc_stderr": 0.019846116338147046,
+             "inst_level_loose_acc": 0.420863309352518,
+             "inst_level_loose_acc_stderr": 0.0005182508497156708
+         }
+     },
+     "versions": {
+         "extended|ifeval|0": 0
+     },
+     "config_tasks": {
+         "extended|ifeval": {
+             "name": "ifeval",
+             "prompt_function": "ifeval_prompt",
+             "hf_repo": "wis-k/instruction-following-eval",
+             "hf_subset": "default",
+             "metric": [
+                 "ifeval_metric"
+             ],
+             "hf_avail_splits": [
+                 "train"
+             ],
+             "evaluation_splits": [
+                 "train"
+             ],
+             "few_shots_split": "train",
+             "few_shots_select": "random_sampling",
+             "generation_size": 1280,
+             "stop_sequence": [],
+             "output_regex": null,
+             "num_samples": null,
+             "frozen": false,
+             "suite": [
+                 "extended"
+             ],
+             "original_num_docs": 541,
+             "effective_num_docs": 541,
+             "trust_dataset": null,
+             "must_remove_duplicate_docs": null,
+             "version": 0
+         }
+     },
+     "summary_tasks": {
+         "extended|ifeval|0": {
+             "hashes": {
+                 "hash_examples": "e99cbf567588d7c6",
+                 "hash_full_prompts": "46932ca113ba6b0c",
+                 "hash_input_tokens": "8b2e0300fbfb70bc",
+                 "hash_cont_tokens": "6fbc835ac9f0efe0"
+             },
+             "truncated": 541,
+             "non_truncated": 0,
+             "padded": 123,
+             "non_padded": 418,
+             "effective_few_shots": 0.0,
+             "num_truncated_few_shots": 0
+         }
+     },
+     "summary_general": {
+         "hashes": {
+             "hash_examples": "ea046ab2c6fc5928",
+             "hash_full_prompts": "8ed52f7b74eb0590",
+             "hash_input_tokens": "9f4b7e9e7ff95d4a",
+             "hash_cont_tokens": "2568308290201d32"
+         },
+         "truncated": 541,
+         "non_truncated": 0,
+         "padded": 123,
+         "non_padded": 418,
+         "num_truncated_few_shots": 0
+     }
+ }
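To read the headline IFEval numbers back out of a file like this, a small sketch (the local filename assumes the file has been downloaded; the keys match the JSON above):

import json

# assumption: the results file sits in the current working directory
with open("results_2024-05-07T20-50-02.215187.json") as f:
    results = json.load(f)

# headline metrics for the "extended|ifeval|0" task, as stored under "results"
ifeval = results["results"]["extended|ifeval|0"]
print("prompt-level strict acc:", ifeval["prompt_level_strict_acc"])  # ~0.279
print("prompt-level loose acc: ", ifeval["prompt_level_loose_acc"])   # ~0.307
print("inst-level strict acc:  ", ifeval["inst_level_strict_acc"])    # ~0.396
print("inst-level loose acc:   ", ifeval["inst_level_loose_acc"])     # ~0.421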