Upload FreedomIntelligence/AceGPT-v2-70B-Chat/results_2025-01-23T13-25-57.976510.json with huggingface_hub
FreedomIntelligence/AceGPT-v2-70B-Chat/results_2025-01-23T13-25-57.976510.json
CHANGED
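The commit title says the results file was pushed with huggingface_hub. Below is a minimal sketch of how such an upload is typically done; the repo_id and local file path are placeholders and are not taken from this commit.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="results_2025-01-23T13-25-57.976510.json",  # local results file (placeholder path)
    path_in_repo="FreedomIntelligence/AceGPT-v2-70B-Chat/results_2025-01-23T13-25-57.976510.json",
    repo_id="your-org/your-results-dataset",  # placeholder; use the actual results dataset repo
    repo_type="dataset",
)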
@@ -451,36 +451,36 @@
         "acc_norm_stderr": 0.025142011474008363
     },
     "community|aratrust:Ethics|0": {
-        "
-        "
+        "f1": 0.7166666666666667,
+        "f1_stderr": 0.05866531842313122
     },
     "community|aratrust:Illegal|0": {
-        "
-        "
+        "f1": 0.9811320754716981,
+        "f1_stderr": 0.01886792452830188
     },
     "community|aratrust:MentalHealth|0": {
-        "
-        "
+        "f1": 0.9342105263157895,
+        "f1_stderr": 0.028626612056281944
     },
     "community|aratrust:Offensive|0": {
-        "
-        "
+        "f1": 0.855072463768116,
+        "f1_stderr": 0.04268963668731211
     },
     "community|aratrust:PhysicalHealth|0": {
-        "
-        "
+        "f1": 0.9178082191780822,
+        "f1_stderr": 0.03236860017981239
     },
     "community|aratrust:Privacy|0": {
-        "
-        "
+        "f1": 0.9473684210526315,
+        "f1_stderr": 0.029839300500728496
     },
     "community|aratrust:Trustfulness|0": {
-        "
-        "
+        "f1": 0.8076923076923077,
+        "f1_stderr": 0.044913400624703226
     },
     "community|aratrust:Unfairness|0": {
-        "
-        "
+        "f1": 0.9454545454545454,
+        "f1_stderr": 0.030903148567228995
     },
     "community|alghafa:_average|0": {
         "acc_norm": 0.5758819891300256,
@@ -499,8 +499,8 @@
         "acc_norm_stderr": 0.022507906739534768
     },
     "community|aratrust:_average|0": {
-        "
-        "
+        "f1": 0.8881756531999796,
+        "f1_stderr": 0.03585924269593753
     },
     "all": {
         "acc_norm": 0.6559100902677332,
@@ -4797,9 +4797,9 @@
         "hf_subset": "Ethics",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -4835,9 +4835,9 @@
         "hf_subset": "Illegal",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -4873,9 +4873,9 @@
         "hf_subset": "MentalHealth",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -4911,9 +4911,9 @@
         "hf_subset": "Offensive",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -4949,9 +4949,9 @@
         "hf_subset": "PhysicalHealth",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -4987,9 +4987,9 @@
         "hf_subset": "Privacy",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -5025,9 +5025,9 @@
         "hf_subset": "Trustfulness",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -5063,9 +5063,9 @@
         "hf_subset": "Unfairness",
         "metric": [
             {
-                "metric_name": "
+                "metric_name": "f1",
                 "higher_is_better": true,
-                "category": "
+                "category": "3",
                 "use_case": "1",
                 "sample_level_fn": "compute",
                 "corpus_level_fn": "mean"
@@ -6742,113 +6742,113 @@
     },
     "community|aratrust:Ethics|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "b77354655caca219",
+            "hash_full_prompts": "b77354655caca219",
+            "hash_input_tokens": "efb34e35cf3d951f",
+            "hash_cont_tokens": "f0f3902a13dca49c"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 3,
+        "non_truncated": 57,
+        "padded": 60,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Illegal|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "daa90cfb03dd9ed8",
+            "hash_full_prompts": "daa90cfb03dd9ed8",
+            "hash_input_tokens": "00eb9d4b283303b9",
+            "hash_cont_tokens": "bd9141df644930ea"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 4,
+        "non_truncated": 49,
+        "padded": 53,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:MentalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "ca046355c96d95d9",
+            "hash_full_prompts": "ca046355c96d95d9",
+            "hash_input_tokens": "26e531eadea9c309",
+            "hash_cont_tokens": "d83c9e9261981359"
        },
-        "truncated":
-        "non_truncated":
-        "padded":
+        "truncated": 20,
+        "non_truncated": 56,
+        "padded": 76,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Offensive|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "6ff77d23c0f3113d",
+            "hash_full_prompts": "6ff77d23c0f3113d",
+            "hash_input_tokens": "985ce83fc708a044",
+            "hash_cont_tokens": "20e82e9b7ddd0d4b"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 1,
+        "non_truncated": 68,
+        "padded": 69,
+        "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:PhysicalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "085db2421f8abf29",
+            "hash_full_prompts": "085db2421f8abf29",
+            "hash_input_tokens": "97193d5a7cda8ca3",
+            "hash_cont_tokens": "2e8393b047883759"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 14,
+        "non_truncated": 59,
+        "padded": 73,
+        "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Privacy|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "78f4d16753b18c49",
+            "hash_full_prompts": "78f4d16753b18c49",
+            "hash_input_tokens": "95ad0fd5409265ec",
+            "hash_cont_tokens": "e56be8c96a51d1cb"
         },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 12,
+        "non_truncated": 45,
+        "padded": 56,
+        "non_padded": 1,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Trustfulness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "373f72b4e30243c4",
+            "hash_full_prompts": "373f72b4e30243c4",
+            "hash_input_tokens": "d958cc1ead832ac4",
+            "hash_cont_tokens": "201a617b7ed50986"
         },
         "truncated": 0,
         "non_truncated": 78,
-        "padded":
+        "padded": 78,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Unfairness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "51fa7940e42ffcc6",
+            "hash_full_prompts": "51fa7940e42ffcc6",
+            "hash_input_tokens": "cfb913451baca302",
+            "hash_cont_tokens": "7f4248ad5aef07fa"
        },
-        "truncated":
-        "non_truncated":
-        "padded":
-        "non_padded":
+        "truncated": 2,
+        "non_truncated": 53,
+        "padded": 55,
+        "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
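As a quick consistency check (not part of the uploaded file): the per-subset configs declare "corpus_level_fn": "mean", and the new "community|aratrust:_average|0" value is indeed the arithmetic mean of the eight subset f1 scores added in this diff.

# Values copied from the diff above; recomputes the aratrust "_average" f1.
f1_scores = [
    0.7166666666666667,  # Ethics
    0.9811320754716981,  # Illegal
    0.9342105263157895,  # MentalHealth
    0.855072463768116,   # Offensive
    0.9178082191780822,  # PhysicalHealth
    0.9473684210526315,  # Privacy
    0.8076923076923077,  # Trustfulness
    0.9454545454545454,  # Unfairness
]
print(sum(f1_scores) / len(f1_scores))  # 0.8881756531999796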