Upload airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json with huggingface_hub
airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json
CHANGED
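Note: the commit message above indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload call is given below; the repo_id placeholder and the local file path are assumptions for illustration, not values recorded in this commit.

    from huggingface_hub import HfApi

    api = HfApi()
    # Push the local results file into the results dataset repo, under the
    # model-specific folder shown in the commit title.
    api.upload_file(
        path_or_fileobj="results_2025-01-19T15-03-37.580700.json",  # local path (assumed)
        path_in_repo="airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json",
        repo_id="<leaderboard-results-dataset>",  # hypothetical placeholder repo id
        repo_type="dataset",
        commit_message="Upload airev-ai/emirati-14b-v2/results_2025-01-19T15-03-37.580700.json with huggingface_hub",
    )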
@@ -451,36 +451,36 @@
         "acc_norm_stderr": 0.024622384500627866
     },
     "community|aratrust:Ethics|0": {
-        "
-        "
+        "acc_norm": 0.7333333333333333,
+        "acc_norm_stderr": 0.057571702611783085
     },
     "community|aratrust:Illegal|0": {
-        "
-        "
+        "acc_norm": 0.8301886792452831,
+        "acc_norm_stderr": 0.05206789873629053
     },
     "community|aratrust:MentalHealth|0": {
-        "
-        "
+        "acc_norm": 0.9210526315789473,
+        "acc_norm_stderr": 0.03113726201631373
     },
     "community|aratrust:Offensive|0": {
-        "
-        "
+        "acc_norm": 0.7971014492753623,
+        "acc_norm_stderr": 0.04876877147472662
     },
     "community|aratrust:PhysicalHealth|0": {
-        "
-        "
+        "acc_norm": 0.821917808219178,
+        "acc_norm_stderr": 0.04508771154824544
     },
     "community|aratrust:Privacy|0": {
-        "
-        "
+        "acc_norm": 0.9122807017543859,
+        "acc_norm_stderr": 0.0378022634811989
     },
     "community|aratrust:Trustfulness|0": {
-        "
-        "
+        "acc_norm": 0.7435897435897436,
+        "acc_norm_stderr": 0.0497609919747403
     },
     "community|aratrust:Unfairness|0": {
-        "
-        "
+        "acc_norm": 0.7818181818181819,
+        "acc_norm_stderr": 0.05620374845754972
     },
     "community|alghafa:_average|0": {
         "acc_norm": 0.5202669965259062,

@@ -499,8 +499,8 @@
         "acc_norm_stderr": 0.022045951361100775
     },
     "community|aratrust:_average|0": {
-        "
-        "
+        "acc_norm": 0.817660316101802,
+        "acc_norm_stderr": 0.04730004378760604
     },
     "all": {
         "acc_norm": 0.5233619051657347,

@@ -4797,9 +4797,9 @@
     "hf_subset": "Ethics",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -4835,9 +4835,9 @@
     "hf_subset": "Illegal",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -4873,9 +4873,9 @@
     "hf_subset": "MentalHealth",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -4911,9 +4911,9 @@
     "hf_subset": "Offensive",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -4949,9 +4949,9 @@
     "hf_subset": "PhysicalHealth",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -4987,9 +4987,9 @@
     "hf_subset": "Privacy",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -5025,9 +5025,9 @@
     "hf_subset": "Trustfulness",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -5063,9 +5063,9 @@
     "hf_subset": "Unfairness",
     "metric": [
        {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"

@@ -6742,113 +6742,113 @@
     },
     "community|aratrust:Ethics|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "5d32da36271c5eb4",
+            "hash_full_prompts": "641aaa5a1d0a82cc",
+            "hash_input_tokens": "41df0d7a53dafddf",
+            "hash_cont_tokens": "67fe5dc315ef723c"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 0,
+        "non_truncated": 60,
+        "padded": 180,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Illegal|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "0c07f1f100f2d0e8",
+            "hash_full_prompts": "9afdf7b67139ac4b",
+            "hash_input_tokens": "078a38959544128f",
+            "hash_cont_tokens": "2cc82a58b4d87abc"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 0,
+        "non_truncated": 53,
+        "padded": 159,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:MentalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "8e5fc5c4704bd96b",
+            "hash_full_prompts": "e1e262c050abe215",
+            "hash_input_tokens": "1413fffc8d02a4d4",
+            "hash_cont_tokens": "7b399d0f0a9124f1"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 0,
+        "non_truncated": 76,
+        "padded": 228,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Offensive|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "5ad4369b7dc5de46",
+            "hash_full_prompts": "783eb34b50ddc3dc",
+            "hash_input_tokens": "28abef033a0e7e22",
+            "hash_cont_tokens": "0cd5015bc3370adf"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 0,
+        "non_truncated": 69,
+        "padded": 207,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:PhysicalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "dc2a632e2dcc86db",
+            "hash_full_prompts": "8368558e325841b5",
+            "hash_input_tokens": "d286528877a50694",
+            "hash_cont_tokens": "cb8655dcad91858d"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 0,
+        "non_truncated": 73,
+        "padded": 210,
+        "non_padded": 9,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Privacy|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "295e35448a39e003",
+            "hash_full_prompts": "099b63b7ccb2c9a9",
+            "hash_input_tokens": "ebd5ae9b68922b08",
+            "hash_cont_tokens": "7f23416c661e2ee5"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 0,
+        "non_truncated": 57,
+        "padded": 162,
+        "non_padded": 9,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Trustfulness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "e79ac1ea5439e623",
+            "hash_full_prompts": "fc6808c8672c7adf",
+            "hash_input_tokens": "f1e2ab1163d7c1e0",
+            "hash_cont_tokens": "ff874dba360c1ede"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 0,
+        "non_truncated": 78,
+        "padded": 228,
+        "non_padded": 6,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Unfairness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "4ac5dccbfbdc5077",
+            "hash_full_prompts": "74179b05a376e621",
+            "hash_input_tokens": "5a105432971a1665",
+            "hash_cont_tokens": "3e990fe3a474dbc5"
        },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 0,
+        "non_truncated": 55,
+        "padded": 159,
+        "non_padded": 6,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
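The added community|aratrust:_average|0 block equals the unweighted mean of the eight per-subset acc_norm values added above (and likewise for acc_norm_stderr). A minimal sketch to check this from the uploaded JSON, assuming the usual lighteval layout in which per-task metrics sit under a top-level "results" key; the local path is illustrative.

    import json
    from statistics import mean

    # Load the uploaded results file (local path is illustrative).
    with open("results_2025-01-19T15-03-37.580700.json") as f:
        results = json.load(f)["results"]

    # Collect the eight per-subset AraTrust entries, excluding the stored average.
    subset_keys = [
        k for k in results
        if k.startswith("community|aratrust:") and ":_average|" not in k
    ]

    recomputed = mean(results[k]["acc_norm"] for k in subset_keys)
    print(recomputed)  # ~0.817660316101802, the stored community|aratrust:_average|0 value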