Upload inceptionai/jais-adapted-7b/results_2025-01-22T19-03-54.144225.json with huggingface_hub
inceptionai/jais-adapted-7b/results_2025-01-22T19-03-54.144225.json
CHANGED
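For reference, an upload like the one named in the commit title is typically done with huggingface_hub's HfApi.upload_file. The snippet below is a minimal sketch; the repo_id, repo_type, and local path are illustrative assumptions, not taken from this commit:

    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_file(
        path_or_fileobj="results_2025-01-22T19-03-54.144225.json",  # local results file (illustrative path)
        path_in_repo="inceptionai/jais-adapted-7b/results_2025-01-22T19-03-54.144225.json",
        repo_id="some-org/some-results-dataset",  # hypothetical results repo
        repo_type="dataset",                      # assumption: results live in a dataset repo
        commit_message="Upload inceptionai/jais-adapted-7b/results_2025-01-22T19-03-54.144225.json with huggingface_hub",
    )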
@@ -451,36 +451,36 @@
 "acc_norm_stderr": 0.024050431713518183
 },
 "community|aratrust:Ethics|0": {
-"
-"
+"acc_norm": 0.26666666666666666,
+"acc_norm_stderr": 0.057571702611783036
 },
 "community|aratrust:Illegal|0": {
-"
-"
+"acc_norm": 0.33962264150943394,
+"acc_norm_stderr": 0.06567388880550808
 },
 "community|aratrust:MentalHealth|0": {
-"
-"
+"acc_norm": 0.3815789473684211,
+"acc_norm_stderr": 0.056092358872800584
 },
 "community|aratrust:Offensive|0": {
-"
-"
+"acc_norm": 0.463768115942029,
+"acc_norm_stderr": 0.060474502475594896
 },
 "community|aratrust:PhysicalHealth|0": {
-"
-"
+"acc_norm": 0.3013698630136986,
+"acc_norm_stderr": 0.05407634375491785
 },
 "community|aratrust:Privacy|0": {
-"
-"
+"acc_norm": 0.3333333333333333,
+"acc_norm_stderr": 0.0629940788348712
 },
 "community|aratrust:Trustfulness|0": {
-"
-"
+"acc_norm": 0.3717948717948718,
+"acc_norm_stderr": 0.05507532564433871
 },
 "community|aratrust:Unfairness|0": {
-"
-"
+"acc_norm": 0.2545454545454545,
+"acc_norm_stderr": 0.059278386873217015
 },
 "community|alghafa:_average|0": {
 "acc_norm": 0.39077482280372144,
@@ -499,8 +499,8 @@
 "acc_norm_stderr": 0.021525069548340423
 },
 "community|aratrust:_average|0": {
-"
-"
+"acc_norm": 0.33908498677173865,
+"acc_norm_stderr": 0.05890457348412891
 },
 "all": {
 "acc_norm": 0.2840655444933057,
@@ -4797,9 +4797,9 @@
 "hf_subset": "Ethics",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -4835,9 +4835,9 @@
 "hf_subset": "Illegal",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -4873,9 +4873,9 @@
 "hf_subset": "MentalHealth",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -4911,9 +4911,9 @@
 "hf_subset": "Offensive",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -4949,9 +4949,9 @@
 "hf_subset": "PhysicalHealth",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -4987,9 +4987,9 @@
 "hf_subset": "Privacy",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -5025,9 +5025,9 @@
 "hf_subset": "Trustfulness",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -5063,9 +5063,9 @@
 "hf_subset": "Unfairness",
 "metric": [
 {
-"metric_name": "
+"metric_name": "acc_norm",
 "higher_is_better": true,
-"category": "
+"category": "8",
 "use_case": "1",
 "sample_level_fn": "compute",
 "corpus_level_fn": "mean"
@@ -6742,113 +6742,113 @@
 },
 "community|aratrust:Ethics|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "5d32da36271c5eb4",
+"hash_full_prompts": "5d32da36271c5eb4",
+"hash_input_tokens": "3217907dec6017d4",
+"hash_cont_tokens": "9403afe6be071fd3"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 60,
+"padded": 168,
+"non_padded": 12,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:Illegal|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "0c07f1f100f2d0e8",
+"hash_full_prompts": "0c07f1f100f2d0e8",
+"hash_input_tokens": "cd701248220e4560",
+"hash_cont_tokens": "c4c1d582c3de428f"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 53,
+"padded": 156,
+"non_padded": 3,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:MentalHealth|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "8e5fc5c4704bd96b",
+"hash_full_prompts": "8e5fc5c4704bd96b",
+"hash_input_tokens": "0a7253cb950187d7",
+"hash_cont_tokens": "ddece1b0a9316221"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 76,
+"padded": 219,
+"non_padded": 9,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:Offensive|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "5ad4369b7dc5de46",
+"hash_full_prompts": "5ad4369b7dc5de46",
+"hash_input_tokens": "64049d27f7318f4b",
+"hash_cont_tokens": "a7133b9cc89b9225"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 69,
+"padded": 195,
+"non_padded": 12,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:PhysicalHealth|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "dc2a632e2dcc86db",
+"hash_full_prompts": "dc2a632e2dcc86db",
+"hash_input_tokens": "5751e730d1118587",
+"hash_cont_tokens": "638273625d8ef1dc"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 73,
+"padded": 198,
+"non_padded": 21,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:Privacy|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "295e35448a39e003",
+"hash_full_prompts": "295e35448a39e003",
+"hash_input_tokens": "0253b2e608e19608",
+"hash_cont_tokens": "1dc5174807ee41ed"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 57,
+"padded": 150,
+"non_padded": 21,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:Trustfulness|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "e79ac1ea5439e623",
+"hash_full_prompts": "e79ac1ea5439e623",
+"hash_input_tokens": "e8d149b5c7a74e3f",
+"hash_cont_tokens": "cc05ef6e19e62e40"
 },
 "truncated": 0,
 "non_truncated": 78,
-"padded":
-"non_padded":
+"padded": 213,
+"non_padded": 21,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
 "community|aratrust:Unfairness|0": {
 "hashes": {
-"hash_examples": "
-"hash_full_prompts": "
-"hash_input_tokens": "
-"hash_cont_tokens": "
+"hash_examples": "4ac5dccbfbdc5077",
+"hash_full_prompts": "4ac5dccbfbdc5077",
+"hash_input_tokens": "6c0fef0c9f1a3646",
+"hash_cont_tokens": "1cf99947d87c13f3"
 },
-"truncated":
-"non_truncated":
-"padded":
-"non_padded":
+"truncated": 0,
+"non_truncated": 55,
+"padded": 153,
+"non_padded": 12,
 "effective_few_shots": 0.0,
 "num_truncated_few_shots": 0
 },
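The new community|aratrust:_average|0 entry is consistent with the per-subset scores added above: its acc_norm is the unweighted mean of the eight aratrust subset acc_norm values. A small sketch to re-check this from the uploaded JSON, assuming the scores sit under a top-level "results" key as the diff suggests (the local filename is illustrative):

    import json

    # Load a local copy of the uploaded results file (filename is illustrative).
    with open("results_2025-01-22T19-03-54.144225.json") as f:
        results = json.load(f)["results"]

    # Collect the per-subset aratrust scores, excluding the precomputed average entry.
    scores = [
        v["acc_norm"]
        for k, v in results.items()
        if k.startswith("community|aratrust:") and "_average" not in k
    ]

    print(sum(scores) / len(scores))                             # expected ~0.33908498677173865
    print(results["community|aratrust:_average|0"]["acc_norm"])  # value stored in the file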