Dataset: OALL · Modalities: Text · Formats: json · Size: < 1K · Libraries: Datasets, Dask
Commit 5a034b2 · verified · 1 parent: 22c9459
Committed by amztheory

Upload INSAIT-Institute/BgGPT-7B-Instruct-v0.2/results_2025-01-18T00-11-37.884002.json with huggingface_hub
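The commit message indicates the file was pushed programmatically with huggingface_hub. Below is a minimal upload sketch, assuming the standard HfApi.upload_file call; the repo_id is a placeholder, since the full name of the target OALL dataset repository is not visible on this page.

# Hypothetical upload sketch (not taken from this commit's actual script).
from huggingface_hub import HfApi

api = HfApi()  # authentication comes from a cached login or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="results_2025-01-18T00-11-37.884002.json",
    path_in_repo="INSAIT-Institute/BgGPT-7B-Instruct-v0.2/results_2025-01-18T00-11-37.884002.json",
    repo_id="OALL/<results-dataset>",  # placeholder: exact repository name not shown here
    repo_type="dataset",
    commit_message=(
        "Upload INSAIT-Institute/BgGPT-7B-Instruct-v0.2/"
        "results_2025-01-18T00-11-37.884002.json with huggingface_hub"
    ),
)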
INSAIT-Institute/BgGPT-7B-Instruct-v0.2/results_2025-01-18T00-11-37.884002.json CHANGED
@@ -451,36 +451,36 @@
     "acc_norm_stderr": 0.02423237247216461
 },
 "community|aratrust:Ethics|0": {
-    "f1": 0.45,
-    "f1_stderr": 0.06476816543825593
+    "acc_norm": 0.31666666666666665,
+    "acc_norm_stderr": 0.06056078175067057
 },
 "community|aratrust:Illegal|0": {
-    "f1": 0.5849056603773585,
-    "f1_stderr": 0.06833050743880026
+    "acc_norm": 0.5660377358490566,
+    "acc_norm_stderr": 0.06873010445603235
 },
 "community|aratrust:MentalHealth|0": {
-    "f1": 0.5657894736842105,
-    "f1_stderr": 0.05723306097613563
+    "acc_norm": 0.5263157894736842,
+    "acc_norm_stderr": 0.05765500605317536
 },
 "community|aratrust:Offensive|0": {
-    "f1": 0.2028985507246377,
-    "f1_stderr": 0.04876877147472663
+    "acc_norm": 0.2463768115942029,
+    "acc_norm_stderr": 0.05225436631107233
 },
 "community|aratrust:PhysicalHealth|0": {
-    "f1": 0.4520547945205479,
-    "f1_stderr": 0.0586540300395668
+    "acc_norm": 0.4520547945205479,
+    "acc_norm_stderr": 0.0586540300395668
 },
 "community|aratrust:Privacy|0": {
-    "f1": 0.631578947368421,
-    "f1_stderr": 0.06446025638903098
+    "acc_norm": 0.6842105263157895,
+    "acc_norm_stderr": 0.06211545730021919
 },
 "community|aratrust:Trustfulness|0": {
-    "f1": 0.3717948717948718,
-    "f1_stderr": 0.055075325644338716
+    "acc_norm": 0.3076923076923077,
+    "acc_norm_stderr": 0.0525971891352175
 },
 "community|aratrust:Unfairness|0": {
-    "f1": 0.6,
-    "f1_stderr": 0.06666666666666667
+    "acc_norm": 0.6363636363636364,
+    "acc_norm_stderr": 0.06546202725664503
 },
 "community|alghafa:_average|0": {
     "acc_norm": 0.4864697143357528,
@@ -499,18 +499,14 @@
     "acc_norm_stderr": 0.021639928855762495
 },
 "community|aratrust:_average|0": {
-    "f1": 0.4823777873087559,
-    "f1_stderr": 0.0604945980084402
+    "acc_norm": 0.4669647835594864,
+    "acc_norm_stderr": 0.05975362028782489
 },
 "all": {
     "acc_norm": 0.36936555555667006,
     "acc_norm_stderr": 0.03246469464782241,
     "f1": 0.4823777873087559,
     "f1_stderr": 0.0604945980084402
-},
-"community|alrage_qa|0": {
-    "llm_as_judge": 0.4880816714150033,
-    "llm_as_judge_stderr": 0.00017262536866633494
 }
 },
 "versions": {
@@ -630,8 +626,7 @@
 "community|aratrust:Trustfulness|0": 0,
 "community|aratrust:Unfairness|0": 0,
 "community|madinah_qa:Arabic Language (General)|0": 0,
-"community|madinah_qa:Arabic Language (Grammar)|0": 0,
-"community|alrage_qa|0": 0
+"community|madinah_qa:Arabic Language (Grammar)|0": 0
 },
 "config_tasks": {
 "community|alghafa:mcq_exams_test_ar": {
@@ -4797,9 +4792,9 @@
 "hf_subset": "Ethics",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -4835,9 +4830,9 @@
 "hf_subset": "Illegal",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -4873,9 +4868,9 @@
 "hf_subset": "MentalHealth",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -4911,9 +4906,9 @@
 "hf_subset": "Offensive",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -4949,9 +4944,9 @@
 "hf_subset": "PhysicalHealth",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -4987,9 +4982,9 @@
 "hf_subset": "Privacy",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -5025,9 +5020,9 @@
 "hf_subset": "Trustfulness",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -5063,9 +5058,9 @@
 "hf_subset": "Unfairness",
 "metric": [
     {
-        "metric_name": "f1",
+        "metric_name": "acc_norm",
         "higher_is_better": true,
-        "category": "3",
+        "category": "8",
         "use_case": "1",
         "sample_level_fn": "compute",
         "corpus_level_fn": "mean"
@@ -5173,44 +5168,6 @@
 "effective_num_docs": 365,
 "must_remove_duplicate_docs": false,
 "version": 0
-},
-"community|alrage_qa": {
-    "name": "alrage_qa",
-    "prompt_function": "qa_prompt_arabic",
-    "hf_repo": "OALL/ALRAGE",
-    "hf_subset": null,
-    "metric": [
-        {
-            "metric_name": "llm_as_judge",
-            "higher_is_better": true,
-            "category": "7",
-            "use_case": "10",
-            "sample_level_fn": "_sample_level_fn",
-            "corpus_level_fn": "aggregate_scores"
-        }
-    ],
-    "hf_revision": null,
-    "hf_filter": null,
-    "hf_avail_splits": [
-        "train"
-    ],
-    "trust_dataset": true,
-    "evaluation_splits": [
-        "train"
-    ],
-    "few_shots_split": null,
-    "few_shots_select": null,
-    "generation_size": 200,
-    "generation_grammar": null,
-    "stop_sequence": [],
-    "num_samples": null,
-    "suite": [
-        "community"
-    ],
-    "original_num_docs": 2106,
-    "effective_num_docs": 2106,
-    "must_remove_duplicate_docs": false,
-    "version": 0
 }
 },
 "summary_tasks": {
@@ -6742,127 +6699,113 @@
 },
 "community|aratrust:Ethics|0": {
     "hashes": {
-        "hash_examples": "b77354655caca219",
-        "hash_full_prompts": "f8b5e7def5f10076",
-        "hash_input_tokens": "0700922be33c6649",
-        "hash_cont_tokens": "6918cdbb6cf2cc13"
+        "hash_examples": "5d32da36271c5eb4",
+        "hash_full_prompts": "8a133dd98514cb63",
+        "hash_input_tokens": "b53da6f7613095aa",
+        "hash_cont_tokens": "6d3c4f79032274e3"
     },
     "truncated": 0,
    "non_truncated": 60,
-    "padded": 60,
+    "padded": 180,
     "non_padded": 0,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:Illegal|0": {
     "hashes": {
-        "hash_examples": "daa90cfb03dd9ed8",
-        "hash_full_prompts": "da6264fe4cf2c9d4",
-        "hash_input_tokens": "4065edd0530b5b8a",
-        "hash_cont_tokens": "4ce9f6198ff9723b"
+        "hash_examples": "0c07f1f100f2d0e8",
+        "hash_full_prompts": "0f87fe63b8ffc43b",
+        "hash_input_tokens": "12dd615eb2687157",
+        "hash_cont_tokens": "96661a81708ac4db"
     },
     "truncated": 0,
     "non_truncated": 53,
-    "padded": 53,
-    "non_padded": 0,
+    "padded": 158,
+    "non_padded": 1,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:MentalHealth|0": {
     "hashes": {
-        "hash_examples": "ca046355c96d95d9",
-        "hash_full_prompts": "64695b94788fcdb7",
-        "hash_input_tokens": "53d4f4bfd65c51ed",
-        "hash_cont_tokens": "e4c175564e2ba9c9"
+        "hash_examples": "8e5fc5c4704bd96b",
+        "hash_full_prompts": "99031dbf490653a9",
+        "hash_input_tokens": "a2107dd3b741ea7f",
+        "hash_cont_tokens": "7de7c348bf205428"
     },
     "truncated": 0,
     "non_truncated": 76,
-    "padded": 76,
+    "padded": 228,
     "non_padded": 0,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:Offensive|0": {
     "hashes": {
-        "hash_examples": "6ff77d23c0f3113d",
-        "hash_full_prompts": "88fff855acdcc795",
-        "hash_input_tokens": "3842072bc985ffaa",
-        "hash_cont_tokens": "53e74b4e286c98ca"
+        "hash_examples": "5ad4369b7dc5de46",
+        "hash_full_prompts": "13da452e44fe97d7",
+        "hash_input_tokens": "784b6ececea15d6a",
+        "hash_cont_tokens": "21fff227540ef87f"
     },
     "truncated": 0,
     "non_truncated": 69,
-    "padded": 69,
-    "non_padded": 0,
+    "padded": 201,
+    "non_padded": 6,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:PhysicalHealth|0": {
     "hashes": {
-        "hash_examples": "085db2421f8abf29",
-        "hash_full_prompts": "365a85584eb463a9",
-        "hash_input_tokens": "8f0204ff1f66cd78",
-        "hash_cont_tokens": "7b2b73a37e06c2a7"
+        "hash_examples": "dc2a632e2dcc86db",
+        "hash_full_prompts": "9338fd082d078e64",
+        "hash_input_tokens": "fcee4e3919f2fc0f",
+        "hash_cont_tokens": "cb15d3c7fcbfacda"
    },
     "truncated": 0,
     "non_truncated": 73,
-    "padded": 73,
-    "non_padded": 0,
+    "padded": 216,
+    "non_padded": 3,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:Privacy|0": {
     "hashes": {
-        "hash_examples": "78f4d16753b18c49",
-        "hash_full_prompts": "a8758c0fa82c937a",
-        "hash_input_tokens": "ebb5fac1b3bbc6fd",
-        "hash_cont_tokens": "bb814f83ca2ded8c"
+        "hash_examples": "295e35448a39e003",
+        "hash_full_prompts": "e0ab2bd04d8c0619",
+        "hash_input_tokens": "1bbed3c32a883a84",
+        "hash_cont_tokens": "70fccc32c4a4ab92"
     },
-    "truncated": 1,
-    "non_truncated": 56,
-    "padded": 56,
-    "non_padded": 1,
+    "truncated": 0,
+    "non_truncated": 57,
+    "padded": 168,
+    "non_padded": 3,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:Trustfulness|0": {
     "hashes": {
-        "hash_examples": "373f72b4e30243c4",
-        "hash_full_prompts": "b8c9d90248575a98",
-        "hash_input_tokens": "9ac937d3605e7568",
-        "hash_cont_tokens": "c99714d829711148"
+        "hash_examples": "e79ac1ea5439e623",
+        "hash_full_prompts": "86073c3a001750e5",
+        "hash_input_tokens": "7366ef183624b5ae",
+        "hash_cont_tokens": "fae099ce346e460d"
     },
     "truncated": 0,
     "non_truncated": 78,
-    "padded": 78,
-    "non_padded": 0,
+    "padded": 231,
+    "non_padded": 3,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 },
 "community|aratrust:Unfairness|0": {
     "hashes": {
-        "hash_examples": "51fa7940e42ffcc6",
-        "hash_full_prompts": "4e76b4afb8fbbd2e",
-        "hash_input_tokens": "e05bd1bb26c1965f",
-        "hash_cont_tokens": "e76897670bacfe2a"
+        "hash_examples": "4ac5dccbfbdc5077",
+        "hash_full_prompts": "9937ee1daa8cc66d",
+        "hash_input_tokens": "1a2dc9fc7af8c02c",
+        "hash_cont_tokens": "d630872c1db07063"
     },
     "truncated": 0,
     "non_truncated": 55,
-    "padded": 55,
-    "non_padded": 0,
-    "effective_few_shots": 0.0,
-    "num_truncated_few_shots": 0
-},
-"community|alrage_qa|0": {
-    "hashes": {
-        "hash_examples": "3edbbe22cabd4160",
-        "hash_full_prompts": "9b8e4209c0ed7c83",
-        "hash_input_tokens": "3bd1579614e13e40",
-        "hash_cont_tokens": "fa5318698a5cdee2"
-    },
-    "truncated": 5,
-    "non_truncated": 2101,
-    "padded": 2106,
-    "non_padded": 0,
+    "padded": 154,
+    "non_padded": 11,
     "effective_few_shots": 0.0,
     "num_truncated_few_shots": 0
 }