Dataset: OALL
Modalities: Text · Formats: json · Size: < 1K · Libraries: Datasets, Dask
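
The page lists Datasets and Dask as compatible libraries, but each per-model result file in this repository is a single JSON document, so it can also be fetched and read directly. A minimal sketch, assuming huggingface_hub is installed; the repo id below is a placeholder, since the results repository is not named on this page:

    import json

    from huggingface_hub import hf_hub_download

    # Placeholder repo id: substitute the actual OALL results dataset repository.
    path = hf_hub_download(
        repo_id="OALL/<results-repo>",
        filename="inceptionai/jais-adapted-13b-chat/results_2025-01-22T20-31-00.427582.json",
        repo_type="dataset",
    )

    with open(path) as f:
        results = json.load(f)

    # Task scores are expected under the top-level "results" key (lighteval-style output).
    print(results["results"]["community|aratrust:_average|0"])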
amztheory committed (verified)
Commit 6561777 · 1 parent: 8e93229

Upload inceptionai/jais-adapted-13b-chat/results_2025-01-22T20-31-00.427582.json with huggingface_hub
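
The commit message indicates the file was pushed with huggingface_hub. A minimal sketch of how such an upload is typically done with that library (the repo id is again a placeholder, and authentication with a Hugging Face token is assumed):

    from huggingface_hub import HfApi

    api = HfApi()  # assumes the caller is authenticated, e.g. via `huggingface-cli login`

    api.upload_file(
        path_or_fileobj="results_2025-01-22T20-31-00.427582.json",
        path_in_repo="inceptionai/jais-adapted-13b-chat/results_2025-01-22T20-31-00.427582.json",
        repo_id="OALL/<results-repo>",  # placeholder: the target results repository
        repo_type="dataset",
        commit_message="Upload inceptionai/jais-adapted-13b-chat/results_2025-01-22T20-31-00.427582.json with huggingface_hub",
    )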

inceptionai/jais-adapted-13b-chat/results_2025-01-22T20-31-00.427582.json CHANGED
@@ -451,36 +451,36 @@
  "acc_norm_stderr": 0.02385932679013149
  },
  "community|aratrust:Ethics|0": {
- "f1": 0.5833333333333334,
- "f1_stderr": 0.06418399868645062
+ "acc_norm": 0.7166666666666667,
+ "acc_norm_stderr": 0.058665318423131205
  },
  "community|aratrust:Illegal|0": {
- "f1": 0.8490566037735849,
- "f1_stderr": 0.0496447935456809
+ "acc_norm": 0.8867924528301887,
+ "acc_norm_stderr": 0.043938680706010445
  },
  "community|aratrust:MentalHealth|0": {
- "f1": 0.7894736842105263,
- "f1_stderr": 0.04707511531578502
+ "acc_norm": 0.7894736842105263,
+ "acc_norm_stderr": 0.04707511531578502
  },
  "community|aratrust:Offensive|0": {
- "f1": 0.6376811594202898,
- "f1_stderr": 0.05828983090868436
+ "acc_norm": 0.9420289855072463,
+ "acc_norm_stderr": 0.028338909017211696
  },
  "community|aratrust:PhysicalHealth|0": {
- "f1": 0.726027397260274,
- "f1_stderr": 0.05256102480922988
+ "acc_norm": 0.726027397260274,
+ "acc_norm_stderr": 0.052561024809229875
  },
  "community|aratrust:Privacy|0": {
- "f1": 0.8245614035087719,
- "f1_stderr": 0.050825312758579586
+ "acc_norm": 0.8596491228070176,
+ "acc_norm_stderr": 0.046416689667799806
  },
  "community|aratrust:Trustfulness|0": {
- "f1": 0.6410256410256411,
- "f1_stderr": 0.05466685034759243
+ "acc_norm": 0.6538461538461539,
+ "acc_norm_stderr": 0.05421594160377288
  },
  "community|aratrust:Unfairness|0": {
- "f1": 0.8181818181818182,
- "f1_stderr": 0.0524863881081478
+ "acc_norm": 0.8,
+ "acc_norm_stderr": 0.05443310539518174
  },
  "community|alghafa:_average|0": {
  "acc_norm": 0.45127136402929074,
@@ -499,8 +499,8 @@
  "acc_norm_stderr": 0.021819896372454503
  },
  "community|aratrust:_average|0": {
- "f1": 0.73366763008928,
- "f1_stderr": 0.05371666431001883
+ "acc_norm": 0.7968105578910092,
+ "acc_norm_stderr": 0.04820559811726534
  },
  "all": {
  "acc_norm": 0.48458625387499904,
@@ -4797,9 +4797,9 @@
  "hf_subset": "Ethics",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4835,9 +4835,9 @@
  "hf_subset": "Illegal",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4873,9 +4873,9 @@
  "hf_subset": "MentalHealth",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4911,9 +4911,9 @@
  "hf_subset": "Offensive",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4949,9 +4949,9 @@
  "hf_subset": "PhysicalHealth",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -4987,9 +4987,9 @@
  "hf_subset": "Privacy",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -5025,9 +5025,9 @@
  "hf_subset": "Trustfulness",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -5063,9 +5063,9 @@
  "hf_subset": "Unfairness",
  "metric": [
  {
- "metric_name": "f1",
+ "metric_name": "acc_norm",
  "higher_is_better": true,
- "category": "3",
+ "category": "8",
  "use_case": "1",
  "sample_level_fn": "compute",
  "corpus_level_fn": "mean"
@@ -6742,113 +6742,113 @@
  },
  "community|aratrust:Ethics|0": {
  "hashes": {
- "hash_examples": "b77354655caca219",
- "hash_full_prompts": "af7d331099b42911",
- "hash_input_tokens": "917a2a70acddabed",
- "hash_cont_tokens": "d9b744f179c129f5"
+ "hash_examples": "5d32da36271c5eb4",
+ "hash_full_prompts": "b8f6fa3ef42c41ce",
+ "hash_input_tokens": "d74ed802dc67b935",
+ "hash_cont_tokens": "9403afe6be071fd3"
  },
- "truncated": 60,
- "non_truncated": 0,
- "padded": 60,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 60,
+ "padded": 168,
+ "non_padded": 12,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Illegal|0": {
  "hashes": {
- "hash_examples": "daa90cfb03dd9ed8",
- "hash_full_prompts": "0b0c42eaef0f0726",
- "hash_input_tokens": "f69b49b225da3ae3",
- "hash_cont_tokens": "593c7580c271ede5"
+ "hash_examples": "0c07f1f100f2d0e8",
+ "hash_full_prompts": "b7ea721ed8f70794",
+ "hash_input_tokens": "5011201d1315a0eb",
+ "hash_cont_tokens": "c4c1d582c3de428f"
  },
- "truncated": 53,
- "non_truncated": 0,
- "padded": 53,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 53,
+ "padded": 156,
+ "non_padded": 3,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:MentalHealth|0": {
  "hashes": {
- "hash_examples": "ca046355c96d95d9",
- "hash_full_prompts": "8a114345da7e9d0b",
- "hash_input_tokens": "388fa01687a2e9a5",
- "hash_cont_tokens": "1e6f2e8b93b63baa"
+ "hash_examples": "8e5fc5c4704bd96b",
+ "hash_full_prompts": "ae5bf11d05bf5657",
+ "hash_input_tokens": "cbffce4962b94481",
+ "hash_cont_tokens": "ddece1b0a9316221"
  },
- "truncated": 76,
- "non_truncated": 0,
- "padded": 76,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 76,
+ "padded": 219,
+ "non_padded": 9,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Offensive|0": {
  "hashes": {
- "hash_examples": "6ff77d23c0f3113d",
- "hash_full_prompts": "3603c0a9dbc6f320",
- "hash_input_tokens": "d05ec6b2a0adb7c0",
- "hash_cont_tokens": "1556b6c370bfc168"
+ "hash_examples": "5ad4369b7dc5de46",
+ "hash_full_prompts": "f8d467c3863475a3",
+ "hash_input_tokens": "686a12cde7edfde4",
+ "hash_cont_tokens": "a7133b9cc89b9225"
  },
- "truncated": 69,
- "non_truncated": 0,
- "padded": 69,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 69,
+ "padded": 195,
+ "non_padded": 12,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:PhysicalHealth|0": {
  "hashes": {
- "hash_examples": "085db2421f8abf29",
- "hash_full_prompts": "aa3672a7e33ffb0d",
- "hash_input_tokens": "90737c236d249014",
- "hash_cont_tokens": "adb522b9d8a973a0"
+ "hash_examples": "dc2a632e2dcc86db",
+ "hash_full_prompts": "0748371030695521",
+ "hash_input_tokens": "c0f6ea07a9d9727e",
+ "hash_cont_tokens": "638273625d8ef1dc"
  },
- "truncated": 73,
- "non_truncated": 0,
- "padded": 73,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 73,
+ "padded": 198,
+ "non_padded": 21,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Privacy|0": {
  "hashes": {
- "hash_examples": "78f4d16753b18c49",
- "hash_full_prompts": "48d479aa4f1d86bb",
- "hash_input_tokens": "e3ea198068f8ebb0",
- "hash_cont_tokens": "ed5228ee8695a224"
+ "hash_examples": "295e35448a39e003",
+ "hash_full_prompts": "be65e5618e393117",
+ "hash_input_tokens": "ed41c3db8ab50d8b",
+ "hash_cont_tokens": "1dc5174807ee41ed"
  },
- "truncated": 57,
- "non_truncated": 0,
- "padded": 56,
- "non_padded": 1,
+ "truncated": 0,
+ "non_truncated": 57,
+ "padded": 150,
+ "non_padded": 21,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Trustfulness|0": {
  "hashes": {
- "hash_examples": "373f72b4e30243c4",
- "hash_full_prompts": "d1a77f73730c9224",
- "hash_input_tokens": "9cfab0a151fbf7d3",
- "hash_cont_tokens": "d9a4de329f34461b"
+ "hash_examples": "e79ac1ea5439e623",
+ "hash_full_prompts": "6c43bf876dce1873",
+ "hash_input_tokens": "dbd35e4c11d47397",
+ "hash_cont_tokens": "cc05ef6e19e62e40"
  },
- "truncated": 78,
- "non_truncated": 0,
- "padded": 78,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 78,
+ "padded": 213,
+ "non_padded": 21,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
  "community|aratrust:Unfairness|0": {
  "hashes": {
- "hash_examples": "51fa7940e42ffcc6",
- "hash_full_prompts": "5e5f6562f67a9cd3",
- "hash_input_tokens": "b997efda3ebcf67e",
- "hash_cont_tokens": "e5bca67126e26eb8"
+ "hash_examples": "4ac5dccbfbdc5077",
+ "hash_full_prompts": "2b32fafb515ee9ac",
+ "hash_input_tokens": "7cd8ed3e2802dcc1",
+ "hash_cont_tokens": "1cf99947d87c13f3"
  },
- "truncated": 55,
- "non_truncated": 0,
- "padded": 55,
- "non_padded": 0,
+ "truncated": 0,
+ "non_truncated": 55,
+ "padded": 153,
+ "non_padded": 12,
  "effective_few_shots": 0.0,
  "num_truncated_few_shots": 0
  },
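
Two quick consistency checks on the new numbers (not part of the file, just a sketch assuming the usual conventions): each acc_norm_stderr matches the sample standard error of a 0/1 score, sqrt(p * (1 - p) / (n - 1)), with n taken from the subset's non_truncated count, and the aratrust _average is the unweighted mean of the eight subset scores.

    import math

    # acc_norm and example count (non_truncated) per AraTrust subset, from the new side of the diff
    subsets = {
        "Ethics": (0.7166666666666667, 60),
        "Illegal": (0.8867924528301887, 53),
        "MentalHealth": (0.7894736842105263, 76),
        "Offensive": (0.9420289855072463, 69),
        "PhysicalHealth": (0.726027397260274, 73),
        "Privacy": (0.8596491228070176, 57),
        "Trustfulness": (0.6538461538461539, 78),
        "Unfairness": (0.8, 55),
    }

    # Per-subset stderr: sqrt(p * (1 - p) / (n - 1))
    for name, (p, n) in subsets.items():
        print(f"{name}: {math.sqrt(p * (1 - p) / (n - 1)):.6f}")
        # Ethics -> 0.058665, Illegal -> 0.043939, ..., matching the acc_norm_stderr values above

    # Corpus average: unweighted mean of the eight subset scores
    scores = [p for p, _ in subsets.values()]
    print(sum(scores) / len(scores))  # ~0.7968105578910092, the reported aratrust _average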