Datasets: OALL /
Modalities: Text · Formats: json · Size: < 1K · Libraries: Datasets, Dask
amztheory committed · commit 4b4b1f8 · verified · 1 parent: 9b3cf7a

Upload inceptionai/jais-family-30b-16k-chat/results_2025-01-22T22-41-14.526663.json with huggingface_hub

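The commit message says the file was pushed with huggingface_hub. For context, a minimal sketch of what such an upload call might look like; the repo_id and local path below are placeholders (the target dataset name is elided on this page), not values taken from the commit:

# Hypothetical upload sketch, assuming a write token is available in HF_TOKEN.
# repo_id and the local filename are placeholders, not shown on this page.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results_2025-01-22T22-41-14.526663.json",  # local results file
    path_in_repo="inceptionai/jais-family-30b-16k-chat/results_2025-01-22T22-41-14.526663.json",
    repo_id="OALL/<results-dataset>",  # placeholder: dataset name is elided above
    repo_type="dataset",
)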
inceptionai/jais-family-30b-16k-chat/results_2025-01-22T22-41-14.526663.json CHANGED
@@ -451,36 +451,36 @@
  "acc_norm_stderr": 0.025486589299152422
  },
  "community|aratrust:Ethics|0": {
- "acc_norm": 0.7166666666666667,
- "acc_norm_stderr": 0.05866531842313122
+ "f1": 0.43333333333333335,
+ "f1_stderr": 0.0645132433593152
  },
  "community|aratrust:Illegal|0": {
- "acc_norm": 0.8679245283018868,
- "acc_norm_stderr": 0.0469515997666816
+ "f1": 0.8113207547169812,
+ "f1_stderr": 0.054257113621244545
  },
  "community|aratrust:MentalHealth|0": {
- "acc_norm": 0.8289473684210527,
- "acc_norm_stderr": 0.043480820110153445
+ "f1": 0.7763157894736842,
+ "f1_stderr": 0.048117853164570124
  },
  "community|aratrust:Offensive|0": {
- "acc_norm": 0.9420289855072463,
- "acc_norm_stderr": 0.0283389090172117
+ "f1": 0.855072463768116,
+ "f1_stderr": 0.04268963668731212
  },
  "community|aratrust:PhysicalHealth|0": {
- "acc_norm": 0.7671232876712328,
- "acc_norm_stderr": 0.04981147084308546
+ "f1": 0.684931506849315,
+ "f1_stderr": 0.05474693519763382
  },
  "community|aratrust:Privacy|0": {
- "acc_norm": 0.8771929824561403,
- "acc_norm_stderr": 0.04385964912280701
+ "f1": 0.7368421052631579,
+ "f1_stderr": 0.058843894144731304
  },
  "community|aratrust:Trustfulness|0": {
- "acc_norm": 0.7435897435897436,
- "acc_norm_stderr": 0.0497609919747403
+ "f1": 0.7051282051282052,
+ "f1_stderr": 0.05196432585754983
  },
  "community|aratrust:Unfairness|0": {
- "acc_norm": 0.7818181818181819,
- "acc_norm_stderr": 0.05620374845754972
+ "f1": 0.7090909090909091,
+ "f1_stderr": 0.06180629713445796
  },
  "community|alghafa:_average|0": {
  "acc_norm": 0.581450339157303,
@@ -499,14 +499,18 @@
  "acc_norm_stderr": 0.022845053289706933
  },
  "community|aratrust:_average|0": {
- "acc_norm": 0.8156614680540188,
- "acc_norm_stderr": 0.047134063464420055
+ "f1": 0.7140043834529627,
+ "f1_stderr": 0.05461741239585186
  },
  "all": {
  "acc_norm": 0.5609137371025663,
  "acc_norm_stderr": 0.03237627212242196,
  "f1": 0.7140043834529627,
  "f1_stderr": 0.05461741239585186
+ },
+ "community|alrage_qa|0": {
+ "llm_as_judge": 0.7495251661918293,
+ "llm_as_judge_stderr": 0.00013008780225733525
  }
  },
  "versions": {
@@ -626,7 +630,8 @@
  "community|aratrust:Trustfulness|0": 0,
  "community|aratrust:Unfairness|0": 0,
  "community|madinah_qa:Arabic Language (General)|0": 0,
- "community|madinah_qa:Arabic Language (Grammar)|0": 0
+ "community|madinah_qa:Arabic Language (Grammar)|0": 0,
+ "community|alrage_qa|0": 0
  },
  "config_tasks": {
  "community|alghafa:mcq_exams_test_ar": {
@@ -4792,9 +4797,9 @@
  "hf_subset": "Ethics",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4830,9 +4835,9 @@
  "hf_subset": "Illegal",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4868,9 +4873,9 @@
  "hf_subset": "MentalHealth",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4906,9 +4911,9 @@
  "hf_subset": "Offensive",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4944,9 +4949,9 @@
  "hf_subset": "PhysicalHealth",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4982,9 +4987,9 @@
  "hf_subset": "Privacy",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -5020,9 +5025,9 @@
  "hf_subset": "Trustfulness",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -5058,9 +5063,9 @@
  "hf_subset": "Unfairness",
  "metric": [
  {
- "metric_name": "acc_norm",
+ "metric_name": "f1",
  "higher_is_better": true,
- "category": "8",
+ "category": "3",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -5168,6 +5173,44 @@
  "effective_num_docs": 365,
  "must_remove_duplicate_docs": false,
  "version": 0
+ },
+ "community|alrage_qa": {
+ "name": "alrage_qa",
+ "prompt_function": "qa_prompt_arabic",
+ "hf_repo": "OALL/ALRAGE",
+ "hf_subset": null,
+ "metric": [
+ {
+ "metric_name": "llm_as_judge",
+ "higher_is_better": true,
+ "category": "7",
+ "use_case": "10",
+ "sample_level_fn": "_sample_level_fn",
+ "corpus_level_fn": "aggregate_scores"
+ }
+ ],
+ "hf_revision": null,
+ "hf_filter": null,
+ "hf_avail_splits": [
+ "train"
+ ],
+ "trust_dataset": true,
+ "evaluation_splits": [
+ "train"
+ ],
+ "few_shots_split": null,
+ "few_shots_select": null,
+ "generation_size": 200,
+ "generation_grammar": null,
+ "stop_sequence": [],
+ "num_samples": null,
+ "suite": [
+ "community"
+ ],
+ "original_num_docs": 2106,
+ "effective_num_docs": 2106,
+ "must_remove_duplicate_docs": false,
+ "version": 0
  }
  },
  "summary_tasks": {
@@ -6699,113 +6742,127 @@
  },
  "community|aratrust:Ethics|0": {
  "hashes": {
- "hash_examples": "5d32da36271c5eb4",
- "hash_full_prompts": "b8f6fa3ef42c41ce",
- "hash_input_tokens": "8db0725b76ef2d00",
- "hash_cont_tokens": "a61c88e6874ea0f3"
+ "hash_examples": "b77354655caca219",
+ "hash_full_prompts": "af7d331099b42911",
+ "hash_input_tokens": "67046b29a2e5dde1",
+ "hash_cont_tokens": "be07d30bef72cb1a"
  },
- "truncated": 0,
- "non_truncated": 60,
- "padded": 180,
+ "truncated": 60,
+ "non_truncated": 0,
+ "padded": 60,
  "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Illegal|0": {
  "hashes": {
- "hash_examples": "0c07f1f100f2d0e8",
- "hash_full_prompts": "b7ea721ed8f70794",
- "hash_input_tokens": "140d4e88f96a1d33",
- "hash_cont_tokens": "ce02a24a3b6deaf2"
+ "hash_examples": "daa90cfb03dd9ed8",
+ "hash_full_prompts": "0b0c42eaef0f0726",
+ "hash_input_tokens": "ce76e15070b3b478",
+ "hash_cont_tokens": "3d8b6037267d9712"
  },
- "truncated": 0,
- "non_truncated": 53,
- "padded": 159,
+ "truncated": 53,
+ "non_truncated": 0,
+ "padded": 53,
  "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:MentalHealth|0": {
  "hashes": {
- "hash_examples": "8e5fc5c4704bd96b",
- "hash_full_prompts": "ae5bf11d05bf5657",
- "hash_input_tokens": "284f580fd98d95b9",
- "hash_cont_tokens": "507159565a90ee58"
+ "hash_examples": "ca046355c96d95d9",
+ "hash_full_prompts": "8a114345da7e9d0b",
+ "hash_input_tokens": "49c1a75a292c7bb6",
+ "hash_cont_tokens": "7d2997ee120b5a10"
  },
- "truncated": 0,
- "non_truncated": 76,
- "padded": 225,
- "non_padded": 3,
+ "truncated": 76,
+ "non_truncated": 0,
+ "padded": 76,
+ "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Offensive|0": {
  "hashes": {
- "hash_examples": "5ad4369b7dc5de46",
- "hash_full_prompts": "f8d467c3863475a3",
- "hash_input_tokens": "b34a149c06c9eef8",
- "hash_cont_tokens": "0dd1d1830ca63474"
+ "hash_examples": "6ff77d23c0f3113d",
+ "hash_full_prompts": "3603c0a9dbc6f320",
+ "hash_input_tokens": "f6c433f73e7dc97f",
+ "hash_cont_tokens": "041e08005b7711de"
  },
- "truncated": 0,
- "non_truncated": 69,
- "padded": 204,
- "non_padded": 3,
+ "truncated": 69,
+ "non_truncated": 0,
+ "padded": 69,
+ "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:PhysicalHealth|0": {
  "hashes": {
- "hash_examples": "dc2a632e2dcc86db",
- "hash_full_prompts": "0748371030695521",
- "hash_input_tokens": "610aa2f54b2f56b7",
- "hash_cont_tokens": "2d67dec7c6bea675"
+ "hash_examples": "085db2421f8abf29",
+ "hash_full_prompts": "aa3672a7e33ffb0d",
+ "hash_input_tokens": "b63b34d83eca2a52",
+ "hash_cont_tokens": "778ef9b3aa266caf"
  },
- "truncated": 0,
- "non_truncated": 73,
- "padded": 210,
- "non_padded": 9,
+ "truncated": 73,
+ "non_truncated": 0,
+ "padded": 73,
+ "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Privacy|0": {
  "hashes": {
- "hash_examples": "295e35448a39e003",
- "hash_full_prompts": "be65e5618e393117",
- "hash_input_tokens": "bc23f70a764ff5dc",
- "hash_cont_tokens": "e84f7d85ef83afe0"
+ "hash_examples": "78f4d16753b18c49",
+ "hash_full_prompts": "48d479aa4f1d86bb",
+ "hash_input_tokens": "5adb23f0c5b09bd7",
+ "hash_cont_tokens": "478a1512c7ee45de"
  },
- "truncated": 0,
- "non_truncated": 57,
- "padded": 162,
- "non_padded": 9,
+ "truncated": 57,
+ "non_truncated": 0,
+ "padded": 56,
+ "non_padded": 1,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Trustfulness|0": {
  "hashes": {
- "hash_examples": "e79ac1ea5439e623",
- "hash_full_prompts": "6c43bf876dce1873",
- "hash_input_tokens": "ba98276b68951db8",
- "hash_cont_tokens": "8accfcabf2f5f70d"
+ "hash_examples": "373f72b4e30243c4",
+ "hash_full_prompts": "d1a77f73730c9224",
+ "hash_input_tokens": "d3eee498eeaab0e4",
+ "hash_cont_tokens": "19ddd3d90dc805bc"
  },
- "truncated": 0,
- "non_truncated": 78,
- "padded": 234,
+ "truncated": 78,
+ "non_truncated": 0,
+ "padded": 78,
  "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Unfairness|0": {
  "hashes": {
- "hash_examples": "4ac5dccbfbdc5077",
- "hash_full_prompts": "2b32fafb515ee9ac",
- "hash_input_tokens": "2f5ea99333534049",
- "hash_cont_tokens": "03a04d63241fa57b"
+ "hash_examples": "51fa7940e42ffcc6",
+ "hash_full_prompts": "5e5f6562f67a9cd3",
+ "hash_input_tokens": "ed6d8a8581b63421",
+ "hash_cont_tokens": "9f57ea592059f8dd"
  },
- "truncated": 0,
- "non_truncated": 55,
- "padded": 162,
- "non_padded": 3,
+ "truncated": 55,
+ "non_truncated": 0,
+ "padded": 55,
+ "non_padded": 0,
+ "effective_few_shots": 0.0,
+ "num_truncated_few_shots": 0
+ },
+ "community|alrage_qa|0": {
+ "hashes": {
+ "hash_examples": "3edbbe22cabd4160",
+ "hash_full_prompts": "74185f3f41a6360c",
+ "hash_input_tokens": "5d28fcbd10c8ba5e",
+ "hash_cont_tokens": "b13f7e16969d831a"
+ },
+ "truncated": 2106,
+ "non_truncated": 0,
+ "padded": 2106,
+ "non_padded": 0,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  }