Upload inceptionai/jais-adapted-70b/results_2025-01-23T14-14-09.083151.json with huggingface_hub
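The commit message indicates the file was pushed with the huggingface_hub client. As a rough sketch, an upload like this one can be reproduced with HfApi.upload_file; note the repo_id below is a placeholder, since the destination repository is not shown on this page:

# Minimal sketch of the upload call; repo_id is a placeholder, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via a saved token or the HF_TOKEN environment variable
api.upload_file(
    path_or_fileobj="results_2025-01-23T14-14-09.083151.json",  # local results file
    path_in_repo="inceptionai/jais-adapted-70b/results_2025-01-23T14-14-09.083151.json",
    repo_id="my-org/my-results-dataset",  # placeholder: actual target repo not shown here
    repo_type="dataset",
    commit_message="Upload inceptionai/jais-adapted-70b/results_2025-01-23T14-14-09.083151.json with huggingface_hub",
)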
inceptionai/jais-adapted-70b/results_2025-01-23T14-14-09.083151.json
CHANGED
@@ -451,36 +451,36 @@
         "acc_norm_stderr": 0.024775172239613903
     },
     "community|aratrust:Ethics|0": {
-        "
-        "
+        "acc_norm": 0.6333333333333333,
+        "acc_norm_stderr": 0.06273730842308473
     },
     "community|aratrust:Illegal|0": {
-        "
-        "
+        "acc_norm": 0.6981132075471698,
+        "acc_norm_stderr": 0.06366244470090368
     },
     "community|aratrust:MentalHealth|0": {
-        "
-        "
+        "acc_norm": 0.8026315789473685,
+        "acc_norm_stderr": 0.045958550517297296
     },
     "community|aratrust:Offensive|0": {
-        "
-        "
+        "acc_norm": 0.8115942028985508,
+        "acc_norm_stderr": 0.04742006474057419
     },
     "community|aratrust:PhysicalHealth|0": {
-        "
-        "
+        "acc_norm": 0.7671232876712328,
+        "acc_norm_stderr": 0.04981147084308546
     },
     "community|aratrust:Privacy|0": {
-        "
-        "
+        "acc_norm": 0.8596491228070176,
+        "acc_norm_stderr": 0.04641668966779981
     },
     "community|aratrust:Trustfulness|0": {
-        "
-        "
+        "acc_norm": 0.6794871794871795,
+        "acc_norm_stderr": 0.053182405107174306
     },
     "community|aratrust:Unfairness|0": {
-        "
-        "
+        "acc_norm": 0.7454545454545455,
+        "acc_norm_stderr": 0.059278386873217015
     },
     "community|alghafa:_average|0": {
         "acc_norm": 0.4962665228816058,
@@ -499,18 +499,14 @@
         "acc_norm_stderr": 0.022475330502548973
     },
     "community|aratrust:_average|0": {
-        "
-        "
+        "acc_norm": 0.7496733072682997,
+        "acc_norm_stderr": 0.05355841510914207
     },
     "all": {
         "acc_norm": 0.49404301593823985,
         "acc_norm_stderr": 0.0332477131303554,
         "f1": 0.7053860411193431,
         "f1_stderr": 0.05588622012932623
-    },
-    "community|alrage_qa|0": {
-        "llm_as_judge": 0.5528490028490017,
-        "llm_as_judge_stderr": 0.00023093948894035965
     }
 },
 "versions": {
@@ -630,8 +626,7 @@
     "community|aratrust:Trustfulness|0": 0,
     "community|aratrust:Unfairness|0": 0,
     "community|madinah_qa:Arabic Language (General)|0": 0,
-    "community|madinah_qa:Arabic Language (Grammar)|0": 0,
-    "community|alrage_qa|0": 0
+    "community|madinah_qa:Arabic Language (Grammar)|0": 0
 },
 "config_tasks": {
     "community|alghafa:mcq_exams_test_ar": {
@@ -4797,9 +4792,9 @@
     "hf_subset": "Ethics",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -4835,9 +4830,9 @@
     "hf_subset": "Illegal",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -4873,9 +4868,9 @@
     "hf_subset": "MentalHealth",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -4911,9 +4906,9 @@
     "hf_subset": "Offensive",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -4949,9 +4944,9 @@
     "hf_subset": "PhysicalHealth",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -4987,9 +4982,9 @@
     "hf_subset": "Privacy",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -5025,9 +5020,9 @@
     "hf_subset": "Trustfulness",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -5063,9 +5058,9 @@
     "hf_subset": "Unfairness",
     "metric": [
         {
-            "metric_name": "
+            "metric_name": "acc_norm",
             "higher_is_better": true,
-            "category": "
+            "category": "8",
             "use_case": "1",
             "sample_level_fn": "compute",
             "corpus_level_fn": "mean"
@@ -5173,44 +5168,6 @@
     "effective_num_docs": 365,
     "must_remove_duplicate_docs": false,
     "version": 0
-    },
-    "community|alrage_qa": {
-        "name": "alrage_qa",
-        "prompt_function": "qa_prompt_arabic",
-        "hf_repo": "OALL/ALRAGE",
-        "hf_subset": null,
-        "metric": [
-            {
-                "metric_name": "llm_as_judge",
-                "higher_is_better": true,
-                "category": "7",
-                "use_case": "10",
-                "sample_level_fn": "_sample_level_fn",
-                "corpus_level_fn": "aggregate_scores"
-            }
-        ],
-        "hf_revision": null,
-        "hf_filter": null,
-        "hf_avail_splits": [
-            "train"
-        ],
-        "trust_dataset": true,
-        "evaluation_splits": [
-            "train"
-        ],
-        "few_shots_split": null,
-        "few_shots_select": null,
-        "generation_size": 200,
-        "generation_grammar": null,
-        "stop_sequence": [],
-        "num_samples": null,
-        "suite": [
-            "community"
-        ],
-        "original_num_docs": 2106,
-        "effective_num_docs": 2106,
-        "must_remove_duplicate_docs": false,
-        "version": 0
     }
 },
 "summary_tasks": {
@@ -6742,127 +6699,113 @@
     },
     "community|aratrust:Ethics|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "5d32da36271c5eb4",
+            "hash_full_prompts": "5d32da36271c5eb4",
+            "hash_input_tokens": "162a7aecdae8a92b",
+            "hash_cont_tokens": "9403afe6be071fd3"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
+        "truncated": 0,
+        "non_truncated": 60,
+        "padded": 180,
         "non_padded": 0,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Illegal|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "0c07f1f100f2d0e8",
+            "hash_full_prompts": "0c07f1f100f2d0e8",
+            "hash_input_tokens": "47a9fae978f1a839",
+            "hash_cont_tokens": "c4c1d582c3de428f"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
+        "truncated": 0,
+        "non_truncated": 53,
+        "padded": 158,
+        "non_padded": 1,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:MentalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "8e5fc5c4704bd96b",
+            "hash_full_prompts": "8e5fc5c4704bd96b",
+            "hash_input_tokens": "0cf3a17870ab9cc4",
+            "hash_cont_tokens": "ddece1b0a9316221"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
+        "truncated": 0,
+        "non_truncated": 76,
+        "padded": 219,
+        "non_padded": 9,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Offensive|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "5ad4369b7dc5de46",
+            "hash_full_prompts": "5ad4369b7dc5de46",
+            "hash_input_tokens": "2b6bfd5ef3c81f61",
+            "hash_cont_tokens": "a7133b9cc89b9225"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
+        "truncated": 0,
+        "non_truncated": 69,
+        "padded": 202,
+        "non_padded": 5,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:PhysicalHealth|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "dc2a632e2dcc86db",
+            "hash_full_prompts": "dc2a632e2dcc86db",
+            "hash_input_tokens": "7c89640e05366244",
+            "hash_cont_tokens": "638273625d8ef1dc"
        },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
+        "truncated": 0,
+        "non_truncated": 73,
+        "padded": 207,
+        "non_padded": 12,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Privacy|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "295e35448a39e003",
+            "hash_full_prompts": "295e35448a39e003",
+            "hash_input_tokens": "11dad94d907420d4",
+            "hash_cont_tokens": "1dc5174807ee41ed"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
+        "truncated": 0,
+        "non_truncated": 57,
+        "padded": 159,
+        "non_padded": 12,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Trustfulness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "e79ac1ea5439e623",
+            "hash_full_prompts": "e79ac1ea5439e623",
+            "hash_input_tokens": "b0b15c4cee546777",
+            "hash_cont_tokens": "cc05ef6e19e62e40"
         },
         "truncated": 0,
         "non_truncated": 78,
-        "padded": 
-        "non_padded": 
+        "padded": 222,
+        "non_padded": 12,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     },
     "community|aratrust:Unfairness|0": {
         "hashes": {
-            "hash_examples": "
-            "hash_full_prompts": "
-            "hash_input_tokens": "
-            "hash_cont_tokens": "
+            "hash_examples": "4ac5dccbfbdc5077",
+            "hash_full_prompts": "4ac5dccbfbdc5077",
+            "hash_input_tokens": "3bfab88d892f0f5d",
+            "hash_cont_tokens": "1cf99947d87c13f3"
         },
-        "truncated": 
-        "non_truncated": 
-        "padded": 
-        "non_padded": 
-        "effective_few_shots": 0.0,
-        "num_truncated_few_shots": 0
-    },
-    "community|alrage_qa|0": {
-        "hashes": {
-            "hash_examples": "3edbbe22cabd4160",
-            "hash_full_prompts": "3edbbe22cabd4160",
-            "hash_input_tokens": "55453ef00381efe3",
-            "hash_cont_tokens": "0ac3fd16b70a4fe4"
-        },
-        "truncated": 2106,
-        "non_truncated": 0,
-        "padded": 2106,
-        "non_padded": 0,
+        "truncated": 0,
+        "non_truncated": 55,
+        "padded": 156,
+        "non_padded": 9,
         "effective_few_shots": 0.0,
         "num_truncated_few_shots": 0
     }
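As a quick consistency check on the numbers added above: the new community|aratrust:_average|0 score is the unweighted mean of the eight aratrust subtask acc_norm values introduced in this commit. A small sketch, with the scores hardcoded from the diff:

# Recompute the aratrust average from the eight subtask acc_norm values above.
scores = [
    0.6333333333333333,  # Ethics
    0.6981132075471698,  # Illegal
    0.8026315789473685,  # MentalHealth
    0.8115942028985508,  # Offensive
    0.7671232876712328,  # PhysicalHealth
    0.8596491228070176,  # Privacy
    0.6794871794871795,  # Trustfulness
    0.7454545454545455,  # Unfairness
]
print(sum(scores) / len(scores))  # ~0.7496733072682997, the "community|aratrust:_average|0" value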