Dataset: OALL
Modalities: Text · Format: JSON · Size: < 1K · Libraries: Datasets, Dask
Commit 19559c7 (verified) · committed by amztheory · parent: 690c54f

Upload airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json with huggingface_hub
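For context, a results file like this can be fetched from the Hub and inspected locally. The following is a minimal sketch, not part of the commit: the dataset name is truncated in the breadcrumb above, so the repo id below is a placeholder, and the top-level "results" key is assumed from lighteval's standard output layout (consistent with the "results"/"versions"/"config_tasks"/"summary_tasks" sections visible in the diff).

import json
from huggingface_hub import hf_hub_download

# Placeholder repo id -- the dataset name is truncated on this page;
# substitute the actual OALL results dataset.
REPO_ID = "OALL/<results-dataset>"
FILENAME = "airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json"

# Fetch the file as of this commit.
path = hf_hub_download(
    repo_id=REPO_ID,
    filename=FILENAME,
    repo_type="dataset",
    revision="19559c7",
)

with open(path, encoding="utf-8") as f:
    results = json.load(f)

# Assuming lighteval's layout: per-task scores live under "results".
for task, scores in results["results"].items():
    if task.startswith("community|aratrust:"):
        print(task, scores)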
airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json CHANGED
@@ -451,36 +451,36 @@
       "acc_norm_stderr": 0.024622384500627866
     },
     "community|aratrust:Ethics|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.7333333333333333,
+      "acc_norm_stderr": 0.057571702611783085
     },
     "community|aratrust:Illegal|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.8301886792452831,
+      "acc_norm_stderr": 0.05206789873629053
     },
     "community|aratrust:MentalHealth|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.9210526315789473,
+      "acc_norm_stderr": 0.03113726201631373
     },
     "community|aratrust:Offensive|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.7971014492753623,
+      "acc_norm_stderr": 0.04876877147472662
     },
     "community|aratrust:PhysicalHealth|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.821917808219178,
+      "acc_norm_stderr": 0.04508771154824544
     },
     "community|aratrust:Privacy|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.9122807017543859,
+      "acc_norm_stderr": 0.0378022634811989
     },
     "community|aratrust:Trustfulness|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.7435897435897436,
+      "acc_norm_stderr": 0.0497609919747403
     },
     "community|aratrust:Unfairness|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.7818181818181819,
+      "acc_norm_stderr": 0.05620374845754972
     },
     "community|alghafa:_average|0": {
       "acc_norm": 0.5202669965259062,
@@ -499,18 +499,14 @@
       "acc_norm_stderr": 0.022045951361100775
     },
     "community|aratrust:_average|0": {
-      "f1": 0.0,
-      "f1_stderr": 0.0
+      "acc_norm": 0.817660316101802,
+      "acc_norm_stderr": 0.04730004378760604
     },
     "all": {
       "acc_norm": 0.5233619051657347,
       "acc_norm_stderr": 0.033052055252515235,
       "f1": 0.0,
       "f1_stderr": 0.0
-    },
-    "community|alrage_qa|0": {
-      "llm_as_judge": 0.7723171889838458,
-      "llm_as_judge_stderr": 0.00010093858300484214
     }
   },
   "versions": {
@@ -630,8 +626,7 @@
     "community|aratrust:Trustfulness|0": 0,
     "community|aratrust:Unfairness|0": 0,
     "community|madinah_qa:Arabic Language (General)|0": 0,
-    "community|madinah_qa:Arabic Language (Grammar)|0": 0,
-    "community|alrage_qa|0": 0
+    "community|madinah_qa:Arabic Language (Grammar)|0": 0
   },
   "config_tasks": {
     "community|alghafa:mcq_exams_test_ar": {
@@ -4797,9 +4792,9 @@
       "hf_subset": "Ethics",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -4835,9 +4830,9 @@
       "hf_subset": "Illegal",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -4873,9 +4868,9 @@
       "hf_subset": "MentalHealth",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -4911,9 +4906,9 @@
       "hf_subset": "Offensive",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -4949,9 +4944,9 @@
       "hf_subset": "PhysicalHealth",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -4987,9 +4982,9 @@
       "hf_subset": "Privacy",
      "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -5025,9 +5020,9 @@
       "hf_subset": "Trustfulness",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -5063,9 +5058,9 @@
       "hf_subset": "Unfairness",
       "metric": [
         {
-          "metric_name": "f1",
+          "metric_name": "acc_norm",
           "higher_is_better": true,
-          "category": "3",
+          "category": "8",
           "use_case": "1",
           "sample_level_fn": "compute",
           "corpus_level_fn": "mean"
@@ -5173,44 +5168,6 @@
       "effective_num_docs": 365,
       "must_remove_duplicate_docs": false,
       "version": 0
-    },
-    "community|alrage_qa": {
-      "name": "alrage_qa",
-      "prompt_function": "qa_prompt_arabic",
-      "hf_repo": "OALL/ALRAGE",
-      "hf_subset": null,
-      "metric": [
-        {
-          "metric_name": "llm_as_judge",
-          "higher_is_better": true,
-          "category": "7",
-          "use_case": "10",
-          "sample_level_fn": "_sample_level_fn",
-          "corpus_level_fn": "aggregate_scores"
-        }
-      ],
-      "hf_revision": null,
-      "hf_filter": null,
-      "hf_avail_splits": [
-        "train"
-      ],
-      "trust_dataset": true,
-      "evaluation_splits": [
-        "train"
-      ],
-      "few_shots_split": null,
-      "few_shots_select": null,
-      "generation_size": 200,
-      "generation_grammar": null,
-      "stop_sequence": [],
-      "num_samples": null,
-      "suite": [
-        "community"
-      ],
-      "original_num_docs": 2106,
-      "effective_num_docs": 2106,
-      "must_remove_duplicate_docs": false,
-      "version": 0
     }
   },
   "summary_tasks": {
@@ -6742,127 +6699,113 @@
     },
     "community|aratrust:Ethics|0": {
       "hashes": {
-        "hash_examples": "b77354655caca219",
-        "hash_full_prompts": "3b6c35af9dd55ba5",
-        "hash_input_tokens": "3f497e926dd77d76",
-        "hash_cont_tokens": "ef0aae761dbc9563"
+        "hash_examples": "5d32da36271c5eb4",
+        "hash_full_prompts": "641aaa5a1d0a82cc",
+        "hash_input_tokens": "41df0d7a53dafddf",
+        "hash_cont_tokens": "67fe5dc315ef723c"
       },
-      "truncated": 36,
-      "non_truncated": 24,
-      "padded": 60,
+      "truncated": 0,
+      "non_truncated": 60,
+      "padded": 180,
       "non_padded": 0,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:Illegal|0": {
       "hashes": {
-        "hash_examples": "daa90cfb03dd9ed8",
-        "hash_full_prompts": "a1a797d500db526f",
-        "hash_input_tokens": "28c17df2a02b1a45",
-        "hash_cont_tokens": "4483294cec3d3f66"
+        "hash_examples": "0c07f1f100f2d0e8",
+        "hash_full_prompts": "9afdf7b67139ac4b",
+        "hash_input_tokens": "078a38959544128f",
+        "hash_cont_tokens": "2cc82a58b4d87abc"
       },
-      "truncated": 37,
-      "non_truncated": 16,
-      "padded": 53,
+      "truncated": 0,
+      "non_truncated": 53,
+      "padded": 159,
       "non_padded": 0,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:MentalHealth|0": {
       "hashes": {
-        "hash_examples": "ca046355c96d95d9",
-        "hash_full_prompts": "f0b0bf58012fc511",
-        "hash_input_tokens": "c98394a996e20c73",
-        "hash_cont_tokens": "da1ffd9a9c460626"
+        "hash_examples": "8e5fc5c4704bd96b",
+        "hash_full_prompts": "e1e262c050abe215",
+        "hash_input_tokens": "1413fffc8d02a4d4",
+        "hash_cont_tokens": "7b399d0f0a9124f1"
      },
-      "truncated": 59,
-      "non_truncated": 17,
-      "padded": 76,
+      "truncated": 0,
+      "non_truncated": 76,
+      "padded": 228,
       "non_padded": 0,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:Offensive|0": {
       "hashes": {
-        "hash_examples": "6ff77d23c0f3113d",
-        "hash_full_prompts": "e44f96f8e6f119af",
-        "hash_input_tokens": "6764872aaa2b5eec",
-        "hash_cont_tokens": "6483d1cf3b353e67"
+        "hash_examples": "5ad4369b7dc5de46",
+        "hash_full_prompts": "783eb34b50ddc3dc",
+        "hash_input_tokens": "28abef033a0e7e22",
+        "hash_cont_tokens": "0cd5015bc3370adf"
       },
-      "truncated": 40,
-      "non_truncated": 29,
-      "padded": 69,
+      "truncated": 0,
+      "non_truncated": 69,
+      "padded": 207,
       "non_padded": 0,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:PhysicalHealth|0": {
       "hashes": {
-        "hash_examples": "085db2421f8abf29",
-        "hash_full_prompts": "76b6bac76dbaf06c",
-        "hash_input_tokens": "4bb4cbf155e95693",
-        "hash_cont_tokens": "e5f97158f7463249"
+        "hash_examples": "dc2a632e2dcc86db",
+        "hash_full_prompts": "8368558e325841b5",
+        "hash_input_tokens": "d286528877a50694",
+        "hash_cont_tokens": "cb8655dcad91858d"
       },
-      "truncated": 64,
-      "non_truncated": 9,
-      "padded": 73,
-      "non_padded": 0,
+      "truncated": 0,
+      "non_truncated": 73,
+      "padded": 210,
+      "non_padded": 9,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:Privacy|0": {
       "hashes": {
-        "hash_examples": "78f4d16753b18c49",
-        "hash_full_prompts": "63c7263b4ad8a155",
-        "hash_input_tokens": "97fef45f53c0229d",
-        "hash_cont_tokens": "43ec4780893c5173"
+        "hash_examples": "295e35448a39e003",
+        "hash_full_prompts": "099b63b7ccb2c9a9",
+        "hash_input_tokens": "ebd5ae9b68922b08",
+        "hash_cont_tokens": "7f23416c661e2ee5"
       },
-      "truncated": 45,
-      "non_truncated": 12,
-      "padded": 56,
-      "non_padded": 1,
+      "truncated": 0,
+      "non_truncated": 57,
+      "padded": 162,
+      "non_padded": 9,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:Trustfulness|0": {
       "hashes": {
-        "hash_examples": "373f72b4e30243c4",
-        "hash_full_prompts": "9b519b28f08eebb6",
-        "hash_input_tokens": "3dba8d8a5c44be92",
-        "hash_cont_tokens": "b2d635832a4abcee"
+        "hash_examples": "e79ac1ea5439e623",
+        "hash_full_prompts": "fc6808c8672c7adf",
+        "hash_input_tokens": "f1e2ab1163d7c1e0",
+        "hash_cont_tokens": "ff874dba360c1ede"
      },
-      "truncated": 2,
-      "non_truncated": 76,
-      "padded": 78,
-      "non_padded": 0,
+      "truncated": 0,
+      "non_truncated": 78,
+      "padded": 228,
+      "non_padded": 6,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     },
     "community|aratrust:Unfairness|0": {
       "hashes": {
-        "hash_examples": "51fa7940e42ffcc6",
-        "hash_full_prompts": "d5ca4d44e29290d9",
-        "hash_input_tokens": "42cf3a01f02df209",
-        "hash_cont_tokens": "99b79a2a6913b126"
-      },
-      "truncated": 41,
-      "non_truncated": 14,
-      "padded": 55,
-      "non_padded": 0,
-      "effective_few_shots": 0.0,
-      "num_truncated_few_shots": 0
-    },
-    "community|alrage_qa|0": {
-      "hashes": {
-        "hash_examples": "3edbbe22cabd4160",
-        "hash_full_prompts": "bbeddba8b85e29c5",
-        "hash_input_tokens": "dbccd73d9313a2b1",
-        "hash_cont_tokens": "69389a79cd2c0fb4"
+        "hash_examples": "4ac5dccbfbdc5077",
+        "hash_full_prompts": "74179b05a376e621",
+        "hash_input_tokens": "5a105432971a1665",
+        "hash_cont_tokens": "3e990fe3a474dbc5"
       },
-      "truncated": 2106,
-      "non_truncated": 0,
-      "padded": 2106,
-      "non_padded": 0,
+      "truncated": 0,
+      "non_truncated": 55,
+      "padded": 159,
+      "non_padded": 6,
       "effective_few_shots": 0.0,
       "num_truncated_few_shots": 0
     }